hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3374634092151a162ca5f82b9c757fbceca8074e
| 966
|
py
|
Python
|
qq/util.py
|
oeg-upm/tada-qq
|
ad8441093e21f4d598893cc2a263b128a7782e29
|
[
"Apache-2.0"
] | 2
|
2022-01-26T08:48:34.000Z
|
2022-01-26T08:48:37.000Z
|
qq/util.py
|
oeg-upm/tada-qq
|
ad8441093e21f4d598893cc2a263b128a7782e29
|
[
"Apache-2.0"
] | null | null | null |
qq/util.py
|
oeg-upm/tada-qq
|
ad8441093e21f4d598893cc2a263b128a7782e29
|
[
"Apache-2.0"
] | null | null | null |
def errors_mean(y_pred, y_real):
    """Return the mean absolute error between predictions and ground truth.

    :param y_pred: list of predicted values
    :param y_real: list of real values
    :return: mean absolute error as a float, or None when the two lists have
        different lengths (an error message is printed) or are empty
    """
    if len(y_pred) != len(y_real):
        print("Error, unmatched number of ys")
        return None
    if not y_pred:
        # guard: original code raised ZeroDivisionError on empty input
        return None
    tot_err = sum(abs(p - r) for p, r in zip(y_pred, y_real))
    return tot_err / len(y_pred)
def errors_sq_mean(y_pred, y_real):
    """Return the mean squared error between predictions and ground truth.

    :param y_pred: list of predicted values
    :param y_real: list of real values
    :return: mean squared error as a float, or None when the two lists have
        different lengths (an error message is printed) or are empty
    """
    if len(y_pred) != len(y_real):
        print("Error, unmatched number of ys")
        return None
    if not y_pred:
        # guard: original code raised ZeroDivisionError on empty input
        return None
    tot_err = sum((p - r) ** 2 for p, r in zip(y_pred, y_real))
    return tot_err / len(y_pred)
| 26.833333
| 46
| 0.60352
| 161
| 966
| 3.354037
| 0.192547
| 0.155556
| 0.088889
| 0.037037
| 0.955556
| 0.955556
| 0.911111
| 0.911111
| 0.911111
| 0.911111
| 0
| 0.006974
| 0.257764
| 966
| 35
| 47
| 27.6
| 0.746165
| 0.302277
| 0
| 0.777778
| 0
| 0
| 0.093098
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0
| 0
| 0.333333
| 0.111111
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
688b92d43b85b20d610155bffb1e7ed6ee5fb722
| 140
|
py
|
Python
|
python/testData/inspections/AddCallSuperRepeatedOptionalParamsPassedToSuperConstructor_after.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/inspections/AddCallSuperRepeatedOptionalParamsPassedToSuperConstructor_after.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/inspections/AddCallSuperRepeatedOptionalParamsPassedToSuperConstructor_after.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
class A:
    # Base class for the inspection fixture: accepts b and c (with defaults)
    # but stores only `a`.
    def __init__(self, a, b=2, c=3):
        self.a = a
class B(A):
    def __init__(self, a, c):
        # NOTE(review): `c` is forwarded positionally, so it lands in A's `b`
        # slot and A's own `c` keeps its default (3). This appears to be
        # deliberate inspection-test data — confirm before "fixing".
        A.__init__(self, a, c)
| 15.555556
| 36
| 0.507143
| 26
| 140
| 2.269231
| 0.346154
| 0.338983
| 0.457627
| 0.40678
| 0.440678
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021053
| 0.321429
| 140
| 8
| 37
| 17.5
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
cc314f24ca7786bde21cb45c84f3c4cd940469a6
| 935
|
py
|
Python
|
torchsparse/nn/functional/pooling.py
|
f-sky/torchsparse
|
65466a10c6fa54bff17c6429706b7019a2a59409
|
[
"MIT"
] | 1
|
2021-03-16T02:47:56.000Z
|
2021-03-16T02:47:56.000Z
|
torchsparse/nn/functional/pooling.py
|
f-sky/torchsparse
|
65466a10c6fa54bff17c6429706b7019a2a59409
|
[
"MIT"
] | null | null | null |
torchsparse/nn/functional/pooling.py
|
f-sky/torchsparse
|
65466a10c6fa54bff17c6429706b7019a2a59409
|
[
"MIT"
] | null | null | null |
import torch
__all__ = ['global_avg_pool', 'global_max_pool']
def global_avg_pool(inputs):
    """Mean-pool the features of each sample in a sparse batch.

    The last column of ``inputs.C`` carries the batch index of every point;
    one averaged feature row is produced per batch sample.
    """
    batch_idx = inputs.C[:, -1]
    num_samples = torch.max(batch_idx).item() + 1
    pooled = [
        torch.index_select(inputs.F, 0, torch.where(batch_idx == b)[0])
        .mean(0)
        .unsqueeze(0)
        for b in range(num_samples)
    ]
    return torch.cat(pooled, 0)
def global_max_pool(inputs):
    """Max-pool the features of each sample in a sparse batch.

    The last column of ``inputs.C`` carries the batch index of every point;
    one element-wise-max feature row is produced per batch sample.
    """
    batch_idx = inputs.C[:, -1]
    num_samples = torch.max(batch_idx).item() + 1
    pooled = [
        torch.index_select(inputs.F, 0, torch.where(batch_idx == b)[0])
        .max(0)[0]
        .unsqueeze(0)
        for b in range(num_samples)
    ]
    return torch.cat(pooled, 0)
| 31.166667
| 73
| 0.60107
| 126
| 935
| 4.206349
| 0.230159
| 0.113208
| 0.049057
| 0.075472
| 0.864151
| 0.864151
| 0.864151
| 0.864151
| 0.864151
| 0.864151
| 0
| 0.022124
| 0.274866
| 935
| 29
| 74
| 32.241379
| 0.759587
| 0
| 0
| 0.75
| 0
| 0
| 0.032086
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0.041667
| 0
| 0.208333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
04539ca74be12ef99bae96a07df51be6d2ca1d74
| 84
|
py
|
Python
|
app/datasets/__init__.py
|
ahmedassal/GAN_SASS_TF
|
5fda7e8ecf5c8ccc15eef47151bd2891d91737b4
|
[
"MIT"
] | 3
|
2021-03-21T06:59:44.000Z
|
2022-03-13T12:26:04.000Z
|
app/datasets/__init__.py
|
ahmedassal/GAN_SASS_TF
|
5fda7e8ecf5c8ccc15eef47151bd2891d91737b4
|
[
"MIT"
] | null | null | null |
app/datasets/__init__.py
|
ahmedassal/GAN_SASS_TF
|
5fda7e8ecf5c8ccc15eef47151bd2891d91737b4
|
[
"MIT"
] | null | null | null |
import app.hparams as hparams
import app.datasets.dataset
import app.datasets.timit
| 21
| 29
| 0.845238
| 13
| 84
| 5.461538
| 0.538462
| 0.380282
| 0.478873
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 84
| 3
| 30
| 28
| 0.934211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f09d671aca8f25520f94a1429ea94c00340d4dba
| 31
|
py
|
Python
|
mlx90614/__init__.py
|
sbbean/mlx90614
|
c700518023c62af66e0abe4a2ee0bfda94ef45d9
|
[
"MIT"
] | 9
|
2020-10-10T03:39:29.000Z
|
2021-12-09T08:54:56.000Z
|
mlx90614/__init__.py
|
sbbean/mlx90614
|
c700518023c62af66e0abe4a2ee0bfda94ef45d9
|
[
"MIT"
] | 3
|
2020-10-07T09:37:17.000Z
|
2022-02-06T02:36:36.000Z
|
mlx90614/__init__.py
|
sbbean/mlx90614
|
c700518023c62af66e0abe4a2ee0bfda94ef45d9
|
[
"MIT"
] | 4
|
2020-09-08T17:02:14.000Z
|
2021-04-27T09:59:37.000Z
|
from .mlx90614 import MLX90614
| 15.5
| 30
| 0.83871
| 4
| 31
| 6.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.37037
| 0.129032
| 31
| 1
| 31
| 31
| 0.592593
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f0a83f96a5084d479b7f8d583ddc2e985b047671
| 14,460
|
py
|
Python
|
tests/test_sorting.py
|
No9005/tcheasy
|
35dbf27ef00b4a6e17cd085eeda0868c83d354ef
|
[
"MIT"
] | null | null | null |
tests/test_sorting.py
|
No9005/tcheasy
|
35dbf27ef00b4a6e17cd085eeda0868c83d354ef
|
[
"MIT"
] | null | null | null |
tests/test_sorting.py
|
No9005/tcheasy
|
35dbf27ef00b4a6e17cd085eeda0868c83d354ef
|
[
"MIT"
] | null | null | null |
"""
Unittests for the parameter sorting function
"""
# imports
import unittest
from tcheasy.sort_parameters import sort_parameters
# create test class
class TestSortParameters(unittest.TestCase):
    """
    Unit tests for tcheasy.sort_parameters.sort_parameters.

    methods:
    --------
    setUp
        Setup method
    tearDown
        Teardown method
    test_only_positional
        Tests sorting of positionals
    test_only_args
        Tests sorting for *args
    test_only_kwargs
        Tests sorting for **kwargs
    test_mixed
        Tests sorting of mixed types
    """
    #region 'setup & teardown' -------------------
    def setUp(self) -> None:
        """Set up the test class."""
        return super().setUp()
    def tearDown(self) -> None:
        """Tear down the test class."""
        return super().tearDown()
    #endregion
    #region 'tests' ------------------------------
    def test_only_positional(self):
        """
        Checks a function with only positional
        parameters.
        """
        #region 'no defaults, no hints'
        """ should return only elements for positional """
        # build function
        def example(a, b, c):
            # get locals
            loc = locals()
            # run sorting
            result = sort_parameters(example, loc, False)
            return result
        # call it
        self.assertEqual(example(1,2,3), {
            'positional':{'a':1, 'b':2, 'c':3},
            'args':[],
            'kwargs':{},
            'hinting':{},
            'declared':["a","b","c"],
            'self':{'available':False, 'value':None}
        })
        #endregion
        #region 'some defaults (use them during call), no hints'
        """ should return only elements for positional """
        # build function
        def example(a, b, c=10):
            # get locals
            loc = locals()
            # run sorting
            result = sort_parameters(example, loc, False)
            return result
        # call it
        self.assertEqual(example(1, b=2), {
            'positional':{'a':1, 'b':2, 'c':10},
            'args':[],
            'kwargs':{},
            'hinting':{},
            'declared':["a","b","c"],
            'self':{'available':False, 'value':None}
        })
        #endregion
        #region 'some defaults (do not use them), no hints'
        """ should return only elements for positional """
        # build function
        def example(a, b, c=10):
            # get locals
            loc = locals()
            # run sorting
            result = sort_parameters(example, loc, False)
            return result
        # call it
        self.assertEqual(example(1, b=2, c=50), {
            'positional':{'a':1, 'b':2, 'c':50},
            'args':[],
            'kwargs':{},
            'hinting':{},
            'declared':["a","b","c"],
            'self':{'available':False, 'value':None}
        })
        #endregion
        #region 'no defaults, some hints'
        """ should return only elements for positional
        'None' should change to the 'any'.
        """
        # build function
        def example(a, b:str, c:int):
            # get locals
            loc = locals()
            # run sorting
            result = sort_parameters(example, loc, False)
            return result
        # call it
        self.assertEqual(example(1, 2, c=3), {
            'positional':{'a':1, 'b':2, 'c':3},
            'args':[],
            'kwargs':{},
            'hinting':{'a':(type(None), int, float, complex, bool, str, list, tuple, dict, set, object), 'b':str, 'c':int},
            'declared':["a","b","c"],
            'self':{'available':False, 'value':None}
        })
        #endregion
        #region 'some defaults (use them), some hints'
        """ should return only elements for positional
        'None' should change to the 'any'.
        """
        # build function
        def example(a, b:str, c:int = 1):
            # get locals
            loc = locals()
            # run sorting
            result = sort_parameters(example, loc, False)
            return result
        # call it
        self.assertEqual(example(1, "apple")['positional'], {'a':1, 'b':"apple", 'c':1})
        #endregion
        #region 'some defaults (do not use them), some hints'
        """ should return only elements for positional
        'None' should change to the 'any'.
        """
        # build function
        def example(a, b:str, c:int = 1):
            # get locals
            loc = locals()
            # run sorting
            result = sort_parameters(example, loc, False)
            return result
        # call it
        self.assertEqual(example(1, b="apple", c=15)['positional'], {'a':1, 'b':"apple", 'c':15})
        #endregion
        #region 'for class method'
        # build class
        class TestClass:
            def test_method(self:int, a, b:int, c:bool = True) -> dict:
                # get locals
                loc = locals()
                # run sorting
                result = sort_parameters(self.test_method, loc, False)
                return result
        # create class case
        case = TestClass()
        # run function
        result = case.test_method("123", 123, False)
        self.assertEqual(result['positional'], {'a':"123", 'b':123, 'c':False})
        self.assertEqual(result['self']['available'], True)
        #endregion
    def test_only_args(self):
        """
        Checks a function with only *args.
        """
        #region 'without hints'
        # build function
        def example(*args):
            # get locals
            loc = locals()
            # run sorting
            result = sort_parameters(example, loc, False)
            return result
        # call it
        self.assertEqual(example(100,200), {
            'positional':{},
            'args':(100, 200),
            'kwargs':{},
            'hinting':{},
            'declared':['args'],
            'self':{'available':False, 'value':None}
        })
        #endregion
        #region 'with hints'
        """ should not add elements to hinted. """
        # build function
        def example(*args:int):
            # get locals
            loc = locals()
            # run sorting
            result = sort_parameters(example, loc, False)
            return result
        # call it
        self.assertEqual(example(100,200), {
            'positional':{},
            'args':(100, 200),
            'kwargs':{},
            'hinting':{},
            'declared':['args'],
            'self':{'available':False, 'value':None}
        })
        #endregion
        #region 'test class method'
        # build class
        class TestClass:
            def __init__(self):
                self.attribute = "attribute"
            def test_method(self, *args) -> dict:
                # get locals
                loc = locals()
                # run sorting
                result = sort_parameters(self.test_method, loc, False)
                return result
        # create class case
        case = TestClass()
        # run function
        result = case.test_method("123", 123, False)
        self.assertEqual(result['args'], ("123", 123, False))
        self.assertEqual(result['self']['available'], True)
        #endregion
    def test_only_kwargs(self):
        """Tests a function with only **kwargs """
        #region 'without hints'
        # build function
        def example(**kwargs):
            # get locals
            loc = locals()
            # run sorting
            result = sort_parameters(example, loc, False)
            return result
        # call it
        self.assertEqual(example(c=100,pp="something", theta=.1), {
            'positional':{},
            'args':[],
            'kwargs':{'c':100,'pp':"something", 'theta':.1},
            'hinting':{},
            'declared':['kwargs'],
            'self':{'available':False, 'value':None}
        })
        #endregion
        #region 'with hints'
        """ should not add elements to hinted. """
        # build function
        def example(**kwargs:int):
            # get locals
            loc = locals()
            # run sorting
            result = sort_parameters(example, loc, False)
            return result
        # call it
        self.assertEqual(example(a=100,z=200), {
            'positional':{},
            'args':[],
            'kwargs':{'a':100, 'z':200},
            'hinting':{},
            'declared':['kwargs'],
            'self':{'available':False, 'value':None}
        })
        #endregion
        #region 'test class method'
        # build class
        class TestClass:
            def test_method(self, **kwargs) -> dict:
                # get locals
                loc = locals()
                # run sorting
                result = sort_parameters(self.test_method, loc, False)
                return result
        # create class case
        case = TestClass()
        # run function
        result = case.test_method(z="123", p=123, q=False, y={})
        self.assertEqual(result['kwargs'], {'z':"123", 'p':123, 'q':False, 'y':{}})
        self.assertTrue(result['self']['available'])
        #endregion
    def test_mixed(self):
        """Tests a mixed function declaration """
        #region 'without defaults, without hints'
        # build function
        def example(a, b, c, *args, **kwargs):
            # get locals
            loc = locals()
            # run sorting
            result = sort_parameters(example, loc, False)
            return result
        # call it
        self.assertEqual(example(100,200,300,400,k="something"), {
            'positional':{'a':100, 'b':200, 'c':300},
            'args':(400,),
            'kwargs':{'k':"something"},
            'hinting':{},
            'declared':['a', 'b', 'c', 'kwargs', 'args'],
            'self':{'available':False, 'value':None}
        })
        #endregion
        #region 'some defaults (not using it), without hints'
        # build function
        def example(a, b="something", c="again", *args, **kwargs):
            # get locals
            loc = locals()
            # run sorting
            result = sort_parameters(example, loc, False)
            return result
        # call it
        self.assertEqual(example(100,200,300,400,k="something"), {
            'positional':{'a':100, 'b':200, 'c':300},
            'args':(400,),
            'kwargs':{'k':"something"},
            'hinting':{},
            'declared':['a', 'b', 'c', 'kwargs', 'args'],
            'self':{'available':False, 'value':None}
        })
        #endregion
        #region 'some defaults (using it), without hints, mixed order'
        # build function
        def example(a, b="something", c="again", *args, **kwargs):
            # get locals
            loc = locals()
            # run sorting
            result = sort_parameters(example, loc, False)
            return result
        # call it
        self.assertEqual(example(100, p=13, c=15, k="something"), {
            'positional':{'a':100, 'b':"something", 'c':15},
            'args':(),
            'kwargs':{'k':"something", 'p':13},
            'hinting':{},
            'declared':['a', 'b', 'c', 'kwargs', 'args'],
            'self':{'available':False, 'value':None}
        })
        #endregion
        #region 'some defaults (using it), without hints, normal order'
        # build function
        def example(a, b="something", c="again", *args, **kwargs):
            # get locals
            loc = locals()
            # run sorting
            result = sort_parameters(example, loc, False)
            return result
        # call it
        self.assertEqual(example(a=100, p=13, k="something"), {
            'positional':{'a':100, 'b':"something", 'c':'again'},
            'args':(),
            'kwargs':{'k':"something", 'p':13},
            'hinting':{},
            'declared':['a', 'b', 'c', 'kwargs', 'args'],
            'self':{'available':False, 'value':None}
        })
        #endregion
        #region 'some defaults (not using it), without hints, mixed order'
        # build function
        def example(a, b="something", c="again", *args, **kwargs):
            # get locals
            loc = locals()
            # run sorting
            result = sort_parameters(example, loc, False)
            return result
        # call it
        self.assertEqual(example(a=100, p=15, c="yes", b="no", k="something"), {
            'positional':{'a':100, 'b':"no", 'c':'yes'},
            'args':(),
            'kwargs':{'k':"something", 'p':15},
            'hinting':{},
            'declared':['a', 'b', 'c', 'kwargs', 'args'],
            'self':{'available':False, 'value':None}
        })
        #endregion
        #region 'no defaults, with hints'
        # build function
        def example(a:int, b:None, c:dict, *args:float, **kwargs):
            # get locals
            loc = locals()
            # run sorting
            result = sort_parameters(example, loc, False)
            return result
        # call it
        self.assertEqual(example(100,200,400,500,500,500,Z="none"), {
            'positional':{'a':100, 'b':200, 'c':400},
            'args':(500,500,500,),
            'kwargs':{'Z':"none"},
            'hinting':{'a':int, 'b':None, 'c':dict},
            'declared':['a', 'b', 'c', 'kwargs', 'args'],
            'self':{'available':False, 'value':None}
        })
        #endregion
        #region 'test class method'
        # build class
        class TestClass:
            def test_method(self, a:int, *args:float, **kwargs) -> dict:
                # get locals
                loc = locals()
                # run sorting
                result = sort_parameters(self.test_method, loc, False)
                return result
        # create class case
        case = TestClass()
        # run function
        result = case.test_method(5, True, z="123", p=123, q=False, y={})
        self.assertEqual(result['kwargs'], {'z':"123", 'p':123, 'q':False, 'y':{}})
        self.assertEqual(result['args'], (True,))
        self.assertEqual(result['positional'], {'a':5})
        self.assertEqual(result['declared'], ["a", "kwargs", "args"])
        self.assertTrue(result['self']['available'])
        #endregion
| 25.235602
| 123
| 0.474827
| 1,430
| 14,460
| 4.766434
| 0.085315
| 0.055018
| 0.035211
| 0.052817
| 0.852993
| 0.839349
| 0.787265
| 0.775088
| 0.752934
| 0.75
| 0
| 0.027891
| 0.372683
| 14,460
| 572
| 124
| 25.27972
| 0.723515
| 0.173167
| 0
| 0.752174
| 0
| 0
| 0.118091
| 0
| 0
| 0
| 0
| 0
| 0.117391
| 1
| 0.117391
| false
| 0
| 0.008696
| 0.004348
| 0.243478
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f0b62e7952fd955cca10bc9a7ef72fa22f33f246
| 97
|
py
|
Python
|
purchasing/opportunities/__init__.py
|
hamhands/pittsburgh-purchasing-suite
|
a79aa77c00c95da8f0b3e2f5f7f7143d5857de35
|
[
"BSD-3-Clause"
] | 22
|
2015-05-08T15:30:42.000Z
|
2021-04-24T20:26:32.000Z
|
purchasing/opportunities/__init__.py
|
hamhands/pittsburgh-purchasing-suite
|
a79aa77c00c95da8f0b3e2f5f7f7143d5857de35
|
[
"BSD-3-Clause"
] | 516
|
2015-04-23T18:14:40.000Z
|
2017-11-08T19:27:41.000Z
|
purchasing/opportunities/__init__.py
|
CityofPittsburgh/pittsburgh-purchasing-suite
|
d676ed9c137e5aaa100992a798acd60ac464a2c1
|
[
"BSD-3-Clause"
] | 10
|
2015-07-08T19:00:10.000Z
|
2021-03-15T18:56:54.000Z
|
# -*- coding: utf-8 -*-
from .admin import blueprint as abp
from .front import blueprint as fbp
| 19.4
| 35
| 0.701031
| 15
| 97
| 4.533333
| 0.733333
| 0.441176
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012658
| 0.185567
| 97
| 4
| 36
| 24.25
| 0.848101
| 0.216495
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 8
|
f0e89aa011733e051b2538631b551027de312528
| 11,202
|
py
|
Python
|
tests/test_mp7.py
|
iscc/iscc-sdk
|
329bf7611979cbb064d95902f5b29fb9b8dfc15b
|
[
"Apache-2.0"
] | 1
|
2022-03-21T19:34:10.000Z
|
2022-03-21T19:34:10.000Z
|
tests/test_mp7.py
|
iscc/iscc-sdk
|
329bf7611979cbb064d95902f5b29fb9b8dfc15b
|
[
"Apache-2.0"
] | 23
|
2022-01-24T16:05:33.000Z
|
2022-03-29T10:48:04.000Z
|
tests/test_mp7.py
|
iscc/iscc-sdk
|
329bf7611979cbb064d95902f5b29fb9b8dfc15b
|
[
"Apache-2.0"
] | null | null | null |
from fractions import Fraction
from bitarray import bitarray
from iscc_sdk import mp7
import iscc_sdk as idk
def test_calc_byte_to_bit3():
    """The byte-to-trit table maps each byte value 0..255 to its five
    base-3 digits (most significant first); values >= 3**5 (243) wrap
    around modulo 243."""
    expected = [
        [(value % 243) // 3 ** place % 3 for place in (4, 3, 2, 1, 0)]
        for value in range(256)
    ]
    assert mp7.calc_byte_to_bit3().tolist() == expected
def test_pop_bits():
    """pop_bits on an alternating bit pattern returns (170, 12)."""
    # 8 bits starting at offset 4 of the pattern are 0b10101010 == 170
    # (presumably — confirm against mp7.pop_bits semantics).
    stream = bitarray("1010101010101010")
    assert mp7.pop_bits(stream, 4, 8) == (170, 12)
def test_read_mp7_signature(mp4_file):
    """End-to-end check of MPEG-7 signature extraction and parsing.

    ``mp4_file`` is a pytest fixture from the project's conftest —
    presumably the path to a sample MP4; TODO confirm.
    """
    # extract the raw MP7 signature bytes from the sample video, then parse
    sig = idk.video_mp7sig_extract(mp4_file)
    result = idk.read_mp7_signature(sig)
    # validate the last decoded frame against known-good values
    frame = result[-1]
    assert isinstance(frame, mp7.Frame)
    assert frame.confidence == 77
    assert frame.elapsed == Fraction(299, 5)
    # expected 380-element frame-signature vector (ternary values 0..2)
    assert frame.vector.tolist() == [
        0,
        0,
        1,
        1,
        0,
        0,
        1,
        0,
        1,
        0,
        0,
        0,
        1,
        0,
        0,
        0,
        1,
        1,
        0,
        0,
        0,
        0,
        0,
        0,
        1,
        1,
        0,
        0,
        0,
        1,
        1,
        0,
        1,
        1,
        2,
        2,
        1,
        1,
        1,
        2,
        2,
        2,
        2,
        2,
        2,
        0,
        0,
        2,
        1,
        1,
        0,
        1,
        1,
        1,
        1,
        1,
        2,
        2,
        2,
        0,
        2,
        2,
        2,
        0,
        0,
        0,
        0,
        0,
        1,
        2,
        2,
        1,
        0,
        2,
        1,
        1,
        2,
        2,
        1,
        1,
        2,
        1,
        1,
        2,
        0,
        1,
        1,
        0,
        2,
        2,
        1,
        1,
        1,
        2,
        1,
        1,
        1,
        0,
        2,
        0,
        0,
        0,
        2,
        2,
        2,
        0,
        2,
        1,
        2,
        2,
        2,
        0,
        0,
        0,
        1,
        2,
        1,
        1,
        1,
        1,
        1,
        2,
        0,
        1,
        0,
        2,
        0,
        0,
        2,
        2,
        1,
        1,
        0,
        0,
        2,
        2,
        2,
        2,
        1,
        2,
        0,
        0,
        2,
        2,
        0,
        2,
        2,
        2,
        2,
        0,
        1,
        0,
        0,
        1,
        0,
        1,
        2,
        0,
        1,
        1,
        1,
        1,
        2,
        0,
        0,
        0,
        0,
        0,
        2,
        1,
        2,
        0,
        1,
        1,
        1,
        1,
        0,
        1,
        2,
        2,
        2,
        1,
        1,
        0,
        1,
        2,
        2,
        2,
        1,
        0,
        0,
        2,
        2,
        2,
        1,
        1,
        2,
        2,
        2,
        1,
        2,
        2,
        2,
        2,
        2,
        2,
        0,
        1,
        2,
        0,
        0,
        2,
        2,
        1,
        0,
        2,
        0,
        0,
        2,
        1,
        0,
        2,
        2,
        2,
        1,
        1,
        1,
        1,
        1,
        2,
        0,
        0,
        2,
        2,
        2,
        1,
        2,
        1,
        1,
        0,
        1,
        1,
        2,
        0,
        1,
        1,
        0,
        1,
        1,
        0,
        2,
        1,
        1,
        0,
        0,
        1,
        2,
        2,
        2,
        0,
        0,
        1,
        2,
        2,
        0,
        2,
        0,
        0,
        0,
        2,
        2,
        0,
        2,
        2,
        0,
        2,
        0,
        1,
        0,
        1,
        0,
        1,
        1,
        0,
        1,
        1,
        0,
        2,
        2,
        2,
        0,
        2,
        1,
        0,
        1,
        1,
        2,
        1,
        2,
        0,
        1,
        0,
        2,
        1,
        0,
        0,
        0,
        1,
        0,
        1,
        0,
        1,
        1,
        0,
        2,
        1,
        0,
        1,
        1,
        0,
        0,
        0,
        2,
        0,
        2,
        1,
        0,
        0,
        2,
        2,
        2,
        2,
        2,
        2,
        0,
        1,
        2,
        2,
        0,
        1,
        1,
        0,
        2,
        1,
        0,
        1,
        2,
        0,
        1,
        1,
        1,
        2,
        2,
        1,
        0,
        0,
        2,
        1,
        1,
        2,
        0,
        1,
        0,
        2,
        1,
        0,
        2,
        2,
        2,
        0,
        1,
        1,
        1,
        1,
        2,
        1,
        2,
        0,
        2,
        2,
    ]
| 16.895928
| 48
| 0.187199
| 1,749
| 11,202
| 1.186964
| 0.026301
| 0.202312
| 0.118497
| 0.061657
| 0.799615
| 0.799615
| 0.794798
| 0.738439
| 0.543353
| 0.128131
| 0
| 0.392297
| 0.612926
| 11,202
| 662
| 49
| 16.92145
| 0.086485
| 0
| 0
| 0.619847
| 0
| 0
| 0.001428
| 0
| 0
| 0
| 0
| 0
| 0.00916
| 1
| 0.00458
| false
| 0
| 0.006107
| 0
| 0.010687
| 0
| 0
| 0
| 1
| null | 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0b198aee591de64501f627df00dca996caa4cf2e
| 175
|
py
|
Python
|
alfworld/agents/eval/__init__.py
|
roy860328/VSGM
|
3ec19f9cf1401cecf45527687936b8fe4167f672
|
[
"MIT"
] | 6
|
2021-05-22T15:33:42.000Z
|
2022-01-12T03:34:39.000Z
|
alfworld/agents/eval/__init__.py
|
roy860328/VSGM
|
3ec19f9cf1401cecf45527687936b8fe4167f672
|
[
"MIT"
] | 1
|
2021-06-19T10:04:13.000Z
|
2021-06-20T03:37:23.000Z
|
alfworld/agents/eval/__init__.py
|
roy860328/VSGM
|
3ec19f9cf1401cecf45527687936b8fe4167f672
|
[
"MIT"
] | null | null | null |
# Package façade: re-export the evaluation entry points so callers can write
# `from agents.eval import evaluate_dagger` (etc.) without knowing the
# submodule layout.
from agents.eval.evaluate_dagger import evaluate_dagger
from agents.eval.evaluate_vision_dagger import evaluate_vision_dagger
from agents.eval.evaluate_dqn import evaluate_dqn
| 58.333333
| 69
| 0.902857
| 26
| 175
| 5.769231
| 0.307692
| 0.2
| 0.28
| 0.44
| 0.373333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.062857
| 175
| 3
| 70
| 58.333333
| 0.914634
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0b1cc392777e46ad5986d71666bdb67b89dc9d58
| 3,252
|
py
|
Python
|
ckanext/datapusher/tests/test_default_views.py
|
ziveo/ckan
|
f4cfe5e28789df58b2bf7e73e5989ffda00e5c5c
|
[
"Apache-2.0"
] | 2,805
|
2015-01-02T18:13:15.000Z
|
2022-03-31T03:35:01.000Z
|
ckanext/datapusher/tests/test_default_views.py
|
ziveo/ckan
|
f4cfe5e28789df58b2bf7e73e5989ffda00e5c5c
|
[
"Apache-2.0"
] | 3,801
|
2015-01-02T11:05:36.000Z
|
2022-03-31T19:24:37.000Z
|
ckanext/datapusher/tests/test_default_views.py
|
ziveo/ckan
|
f4cfe5e28789df58b2bf7e73e5989ffda00e5c5c
|
[
"Apache-2.0"
] | 1,689
|
2015-01-02T19:46:43.000Z
|
2022-03-28T14:59:43.000Z
|
# encoding: utf-8
import datetime

import pytest

from ckan.tests import helpers, factories


def _make_datastore_resource_with_pending_task():
    """Create a resource backed by DataStore data plus a pending datapusher task.

    Pushes data directly to the DataStore so the resource is marked
    `datastore_active=True` (a prerequisite for creating the grid view), then
    registers a pending ``datapusher`` task for ``datapusher_hook`` to update.

    Returns:
        dict: the created resource.
    """
    dataset = factories.Dataset()
    resource = factories.Resource(package_id=dataset["id"])
    data = {
        "resource_id": resource["id"],
        "fields": [{"id": "a", "type": "text"}, {"id": "b", "type": "text"}],
        "records": [{"a": "1", "b": "2"}],
        "force": True,
    }
    helpers.call_action("datastore_create", **data)
    task_dict = {
        "entity_id": resource["id"],
        "entity_type": "resource",
        "task_type": "datapusher",
        "key": "datapusher",
        "value": '{"job_id": "my_id", "job_key":"my_key"}',
        "last_updated": str(datetime.datetime.now()),
        "state": "pending",
    }
    helpers.call_action("task_status_update", context={}, **task_dict)
    return resource


@pytest.mark.ckan_config("ckan.views.default_views", "recline_grid_view")
@pytest.mark.ckan_config(
    "ckan.plugins", "datapusher datastore recline_grid_view"
)
@pytest.mark.usefixtures("clean_db", "with_plugins")
def test_datapusher_creates_default_views_on_complete():
    """A 'complete' datapusher_hook status triggers default view creation."""
    resource = _make_datastore_resource_with_pending_task()
    params = {
        "status": "complete",
        "metadata": {"resource_id": resource["id"]},
    }
    helpers.call_action("datapusher_hook", context={}, **params)
    views = helpers.call_action("resource_view_list", id=resource["id"])
    assert len(views) == 1
    assert views[0]["view_type"] == "recline_grid_view"


@pytest.mark.ckan_config("ckan.views.default_views", "recline_grid_view")
@pytest.mark.ckan_config(
    "ckan.plugins", "datapusher datastore recline_grid_view"
)
@pytest.mark.usefixtures("clean_db", "with_plugins")
def test_datapusher_does_not_create_default_views_on_pending():
    """A 'pending' datapusher_hook status must NOT create any views."""
    resource = _make_datastore_resource_with_pending_task()
    params = {"status": "pending", "metadata": {"resource_id": resource["id"]}}
    helpers.call_action("datapusher_hook", context={}, **params)
    views = helpers.call_action("resource_view_list", id=resource["id"])
    assert len(views) == 0
| 32.52
| 79
| 0.637454
| 399
| 3,252
| 4.977444
| 0.220551
| 0.060423
| 0.048338
| 0.05287
| 0.91289
| 0.91289
| 0.91289
| 0.91289
| 0.905337
| 0.905337
| 0
| 0.003066
| 0.197724
| 3,252
| 99
| 80
| 32.848485
| 0.758145
| 0.162669
| 0
| 0.707692
| 0
| 0
| 0.307267
| 0.017706
| 0
| 0
| 0
| 0
| 0.046154
| 1
| 0.030769
| false
| 0
| 0.046154
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
505f109474b1e15582f870886c4933191025b501
| 6,120
|
py
|
Python
|
tests/causal_refuters/test_bootstrap_refuter.py
|
ErikHambardzumyan/dowhy
|
685f64723e2a37334164dbb8da7d55f4e45975de
|
[
"MIT"
] | 1
|
2020-05-30T21:04:27.000Z
|
2020-05-30T21:04:27.000Z
|
tests/causal_refuters/test_bootstrap_refuter.py
|
ErikHambardzumyan/dowhy
|
685f64723e2a37334164dbb8da7d55f4e45975de
|
[
"MIT"
] | null | null | null |
tests/causal_refuters/test_bootstrap_refuter.py
|
ErikHambardzumyan/dowhy
|
685f64723e2a37334164dbb8da7d55f4e45975de
|
[
"MIT"
] | 1
|
2020-07-30T12:32:46.000Z
|
2020-07-30T12:32:46.000Z
|
import pytest
import numpy as np
from .base import TestRefuter
@pytest.mark.usefixtures("fixed_seed")
class TestDataSubsetRefuter(object):
    '''
    Tests for the bootstrap refuter.
    The first two tests are for the default behavior, in which we just bootstrap the data
    and obtain the estimate.
    NOTE(review): the class is named TestDataSubsetRefuter although every test
    here exercises "bootstrap_refuter" (this file is test_bootstrap_refuter.py);
    the name looks copy-pasted from the data-subset test module — consider
    renaming (pytest collects it either way).
    '''
    # Default behavior, continuous treatment (instrumental-variable estimator).
    @pytest.mark.parametrize(["error_tolerance","estimator_method","num_samples"],
                             [(0.05, "iv.instrumental_variable",1000)])
    def test_refutation_bootstrap_refuter_continuous(self, error_tolerance, estimator_method, num_samples):
        refuter_tester = TestRefuter(error_tolerance, estimator_method, "bootstrap_refuter")
        refuter_tester.continuous_treatment_testsuite(num_samples=num_samples) # Run both
    # Default behavior, binary treatment (propensity-score matching estimator).
    @pytest.mark.parametrize(["error_tolerance", "estimator_method","num_samples"],
                             [(0.05, "backdoor.propensity_score_matching",1000)])
    def test_refutation_bootstrap_refuter_binary(self, error_tolerance, estimator_method, num_samples):
        refuter_tester = TestRefuter(error_tolerance, estimator_method, "bootstrap_refuter")
        refuter_tester.binary_treatment_testsuite(tests_to_run="atleast-one-common-cause", num_samples=num_samples)
    # required_variables given as an int (number of variables to bootstrap on).
    @pytest.mark.parametrize(["error_tolerance","estimator_method","num_common_causes","required_variables", "num_samples"],
                             [(0.05, "iv.instrumental_variable",5, 3, 1000)])
    def test_refutation_bootstrap_refuter_continuous_integer_argument(self, error_tolerance, estimator_method, num_common_causes, required_variables, num_samples):
        refuter_tester = TestRefuter(error_tolerance,
                                     estimator_method,
                                     "bootstrap_refuter",
                                     required_variables=required_variables,
                                     )
        refuter_tester.continuous_treatment_testsuite(num_samples=num_samples,num_common_causes=num_common_causes, tests_to_run="atleast-one-common-cause") # Run atleast one common cause
    # required_variables given as an explicit list of variable names.
    @pytest.mark.parametrize(["error_tolerance","estimator_method", "num_common_causes", "required_variables", "num_samples"],
                             [(0.05, "iv.instrumental_variable", 5, ["W0","W1"], 1000)])
    def test_refutation_bootstrap_refuter_continuous_list_argument(self, error_tolerance, estimator_method, num_common_causes, required_variables, num_samples):
        refuter_tester = TestRefuter(error_tolerance,
                                     estimator_method,
                                     "bootstrap_refuter",
                                     required_variables=required_variables)
        refuter_tester.continuous_treatment_testsuite(num_samples=num_samples,num_common_causes=num_common_causes, tests_to_run="atleast-one-common-cause") # Run atleast one common cause
    # Binary treatment with required_variables as an int.
    @pytest.mark.parametrize(["error_tolerance", "estimator_method", "num_common_causes", "required_variables", "num_samples"],
                             [(0.1, "backdoor.propensity_score_matching", 5, 3, 5000)])
    def test_refutation_bootstrap_refuter_binary_integer_argument(self, error_tolerance, estimator_method, num_common_causes, required_variables, num_samples):
        refuter_tester = TestRefuter(error_tolerance,
                                     estimator_method,
                                     "bootstrap_refuter",
                                     required_variables=required_variables)
        refuter_tester.binary_treatment_testsuite(num_samples=num_samples,num_common_causes=num_common_causes, tests_to_run="atleast-one-common-cause")
    # Binary treatment with required_variables as a list of names.
    @pytest.mark.parametrize(["error_tolerance", "estimator_method", "num_common_causes", "required_variables", "num_samples"],
                             [(0.1, "backdoor.propensity_score_matching",5, ["W0", "W1"], 5000)])
    def test_refutation_bootstrap_refuter_binary_list_argument(self, error_tolerance, estimator_method, num_common_causes, required_variables, num_samples):
        refuter_tester = TestRefuter(error_tolerance,
                                     estimator_method,
                                     "bootstrap_refuter",
                                     required_variables=required_variables)
        refuter_tester.binary_treatment_testsuite(num_samples=num_samples,num_common_causes=num_common_causes, tests_to_run="atleast-one-common-cause")
    # Negated names ("-W0") — presumably "exclude these variables"; confirm
    # against the bootstrap refuter's required_variables semantics.
    @pytest.mark.parametrize(["error_tolerance","estimator_method", "num_common_causes", "required_variables", "num_samples"],
                             [(0.1, "iv.instrumental_variable", 5, ["-W0","-W1"], 5000)])
    def test_refutation_bootstrap_refuter_continuous_list_negative_argument(self, error_tolerance, estimator_method, num_common_causes, required_variables, num_samples):
        refuter_tester = TestRefuter(error_tolerance,
                                     estimator_method,
                                     "bootstrap_refuter",
                                     required_variables=required_variables)
        refuter_tester.continuous_treatment_testsuite(num_samples=num_samples,num_common_causes=num_common_causes, tests_to_run="atleast-one-common-cause") # Run atleast one common cause
    # Same negated-name form, binary treatment.
    @pytest.mark.parametrize(["error_tolerance", "estimator_method", "num_common_causes", "required_variables", "num_samples"],
                             [(0.1, "backdoor.propensity_score_matching",5, ["-W0", "-W1"], 5000)])
    def test_refutation_bootstrap_refuter_binary_list_negative_argument(self, error_tolerance, estimator_method, num_common_causes, required_variables, num_samples):
        refuter_tester = TestRefuter(error_tolerance,
                                     estimator_method,
                                     "bootstrap_refuter",
                                     required_variables=required_variables)
        refuter_tester.binary_treatment_testsuite(num_samples=num_samples,num_common_causes=num_common_causes, tests_to_run="atleast-one-common-cause")
| 75.555556
| 187
| 0.668301
| 626
| 6,120
| 6.110224
| 0.126198
| 0.08366
| 0.144314
| 0.181961
| 0.931242
| 0.924967
| 0.906928
| 0.842353
| 0.842353
| 0.818824
| 0
| 0.014709
| 0.244608
| 6,120
| 81
| 188
| 75.555556
| 0.812676
| 0.033824
| 0
| 0.609375
| 0
| 0
| 0.191658
| 0.068942
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0.046875
| 0
| 0.1875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
acbcf6725dc0ba1da9d97fa878fdf8260be21451
| 1,832
|
py
|
Python
|
tests/_validation/test_size_validation.py
|
ynsnf/apysc
|
b10ffaf76ec6beb187477d0a744fca00e3efc3fb
|
[
"MIT"
] | 16
|
2021-04-16T02:01:29.000Z
|
2022-01-01T08:53:49.000Z
|
tests/_validation/test_size_validation.py
|
ynsnf/apysc
|
b10ffaf76ec6beb187477d0a744fca00e3efc3fb
|
[
"MIT"
] | 613
|
2021-03-24T03:37:38.000Z
|
2022-03-26T10:58:37.000Z
|
tests/_validation/test_size_validation.py
|
simon-ritchie/apyscript
|
c319f8ab2f1f5f7fad8d2a8b4fc06e7195476279
|
[
"MIT"
] | 2
|
2021-06-20T07:32:58.000Z
|
2021-12-26T08:22:11.000Z
|
from apysc._validation import size_validation
from tests import testing_helper
def test_validate_size_is_int() -> None:
    """validate_size_is_int accepts an int and raises ValueError for a
    non-integer size, both with and without a custom error message."""
    size_validation.validate_size_is_int(
        size=100, err_msg='Specified width is not integer value.')
    testing_helper.assert_raises(
        expected_error_class=ValueError,
        func_or_method=size_validation.validate_size_is_int,
        kwargs={
            'size': '100px',
            'err_msg': 'Specified width is not integer value.'})
    testing_helper.assert_raises(
        expected_error_class=ValueError,
        func_or_method=size_validation.validate_size_is_int,
        kwargs={'size': '100px'})
def test_validate_size_is_gt_zero() -> None:
    """validate_size_is_gt_zero accepts a positive size and raises
    ValueError for zero, with or without a custom error message."""
    size_validation.validate_size_is_gt_zero(
        size=1,
        err_msg='Specified width is less than or equal to zero.')
    testing_helper.assert_raises(
        expected_error_class=ValueError,
        func_or_method=size_validation.validate_size_is_gt_zero,
        kwargs={'size': 0})
    testing_helper.assert_raises(
        expected_error_class=ValueError,
        func_or_method=size_validation.validate_size_is_gt_zero,
        kwargs={
            'size': 0,
            'err_msg': 'Specified width is less than or equal to zero.'})
def test_validate_size_is_gte_zero() -> None:
    """validate_size_is_gte_zero accepts zero and positive sizes and
    raises ValueError for a negative size."""
    size_validation.validate_size_is_gte_zero(size=0)
    size_validation.validate_size_is_gte_zero(size=100)
    testing_helper.assert_raises(
        expected_error_class=ValueError,
        func_or_method=size_validation.validate_size_is_gte_zero,
        kwargs={'size': -1})
    testing_helper.assert_raises(
        expected_error_class=ValueError,
        func_or_method=size_validation.validate_size_is_gte_zero,
        kwargs={'size': -1, 'err_msg': 'Size is invalid.'})
| 34.566038
| 74
| 0.690502
| 236
| 1,832
| 4.915254
| 0.182203
| 0.072414
| 0.156897
| 0.224138
| 0.909483
| 0.850862
| 0.82069
| 0.777586
| 0.710345
| 0.710345
| 0
| 0.012694
| 0.225983
| 1,832
| 52
| 75
| 35.230769
| 0.80536
| 0
| 0
| 0.5
| 0
| 0
| 0.133146
| 0
| 0
| 0
| 0
| 0
| 0.15
| 1
| 0.075
| true
| 0
| 0.05
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
acd08af32f9f9595b90a741db2d815eaa313f2ec
| 19,312
|
py
|
Python
|
multicurrency/rupee.py
|
fscm/multicurrency
|
5eabdcbfbf427dcafe08d4d05cfce8c9348aeb91
|
[
"MIT"
] | 2
|
2021-03-26T18:19:57.000Z
|
2021-07-27T01:15:50.000Z
|
multicurrency/rupee.py
|
fscm/multicurrency
|
5eabdcbfbf427dcafe08d4d05cfce8c9348aeb91
|
[
"MIT"
] | null | null | null |
multicurrency/rupee.py
|
fscm/multicurrency
|
5eabdcbfbf427dcafe08d4d05cfce8c9348aeb91
|
[
"MIT"
] | null | null | null |
# -*- coding: UTF-8 -*-
#
# copyright: 2020-2022, Frederico Martins
# author: Frederico Martins <http://github.com/fscm>
# license: SPDX-License-Identifier: MIT
"""Rupee currency representation(s)."""
from decimal import Decimal
from typing import Optional, Union
from .currency import Currency
class IndianRupee(Currency):
    """Indian Rupee currency representation.
    Simple usage example:
        >>> from multicurrency import IndianRupee
        >>> indian_rupee = IndianRupee(
        ...     amount=123456.789)
        >>> print(indian_rupee)
        ₹123,456.79
    For more details see `multicurrency.currency.Currency` .
    Args:
        amount (Union[int, float, Decimal]): Represented value.
        decimal_places (int, optional): Number of decimal places for the
            currency representation. Defaults to 2.
        decimal_sign (str, optional): Decimal symbol. Defaults to '.'.
        grouping_places (int, optional): Number of digits for grouping.
            Defaults to 3.
        grouping_sign (str, optional): Grouping symbol. Defaults to ','.
        international (bool, optional): Identifies the currency using
            the 'currency' value instead of the 'symbol'. Defaults to
            False.
        symbol_separator (str, optional): Separation between the symbol
            and the value. Defaults to ''.
        symbol_ahead (bool, optional): True if symbol goes ahead of the
            value. False otherwise. Defaults to True.
    """
    __slots__ = []
    def __new__(  # pylint: disable=signature-differs,disable=unused-argument
            cls,
            amount: Union[int, float, Decimal],
            decimal_places: Optional[int] = 2,
            decimal_sign: Optional[str] = '.',
            grouping_places: Optional[int] = 3,
            grouping_sign: Optional[str] = ',',
            international: Optional[bool] = False,
            symbol_ahead: Optional[bool] = True,
            symbol_separator: Optional[str] = '',
            **other) -> 'IndianRupee':
        """Class creator.
        Returns:
            IndianRupee: new object.
        """
        return Currency.__new__(
            cls,
            amount=amount,
            alpha_code='INR',
            numeric_code='356',
            symbol='₹',
            symbol_separator=symbol_separator,
            symbol_ahead=symbol_ahead,
            localized_symbol='₹',
            decimal_places=decimal_places,
            decimal_sign=decimal_sign,
            grouping_places=grouping_places,
            grouping_sign=grouping_sign,
            convertion='',
            international=international)
class IndianRupeeBT(Currency):
    """Indian Rupee BT currency representation.
    Simple usage example:
        >>> from multicurrency import IndianRupeeBT
        >>> indian_rupee_bt = IndianRupeeBT(
        ...     amount=123456.789)
        >>> print(indian_rupee_bt)
        ₹123,456.79
    For more details see `multicurrency.currency.Currency` .
    Args:
        amount (Union[int, float, Decimal]): Represented value.
        decimal_places (int, optional): Number of decimal places for the
            currency representation. Defaults to 2.
        decimal_sign (str, optional): Decimal symbol. Defaults to '.'.
        grouping_places (int, optional): Number of digits for grouping.
            Defaults to 3.
        grouping_sign (str, optional): Grouping symbol. Defaults to ','.
        international (bool, optional): Identifies the currency using
            the 'currency' value instead of the 'symbol'. Defaults to
            False.
        symbol_separator (str, optional): Separation between the symbol
            and the value. Defaults to ''.
        symbol_ahead (bool, optional): True if symbol goes ahead of the
            value. False otherwise. Defaults to True.
    """
    __slots__ = []
    def __new__(  # pylint: disable=signature-differs,disable=unused-argument
            cls,
            amount: Union[int, float, Decimal],
            decimal_places: Optional[int] = 2,
            decimal_sign: Optional[str] = '.',
            grouping_places: Optional[int] = 3,
            grouping_sign: Optional[str] = ',',
            international: Optional[bool] = False,
            symbol_ahead: Optional[bool] = True,
            symbol_separator: Optional[str] = '',
            **other) -> 'IndianRupeeBT':
        """Class creator.
        Returns:
            IndianRupeeBT: new object.
        """
        return Currency.__new__(
            cls,
            amount=amount,
            alpha_code='INR',
            numeric_code='356',
            symbol='₹',
            symbol_separator=symbol_separator,
            symbol_ahead=symbol_ahead,
            localized_symbol='BT₹',
            decimal_places=decimal_places,
            decimal_sign=decimal_sign,
            grouping_places=grouping_places,
            grouping_sign=grouping_sign,
            convertion='',
            international=international)
class IndianRupeeIN(Currency):
    """Indian Rupee IN currency representation.
    Simple usage example:
        >>> from multicurrency import IndianRupeeIN
        >>> indian_rupee_in = IndianRupeeIN(
        ...     amount=123456.789)
        >>> print(indian_rupee_in)
        ₹123,456.79
    For more details see `multicurrency.currency.Currency` .
    Args:
        amount (Union[int, float, Decimal]): Represented value.
        decimal_places (int, optional): Number of decimal places for the
            currency representation. Defaults to 2.
        decimal_sign (str, optional): Decimal symbol. Defaults to '.'.
        grouping_places (int, optional): Number of digits for grouping.
            Defaults to 3.
        grouping_sign (str, optional): Grouping symbol. Defaults to ','.
        international (bool, optional): Identifies the currency using
            the 'currency' value instead of the 'symbol'. Defaults to
            False.
        symbol_separator (str, optional): Separation between the symbol
            and the value. Defaults to ''.
        symbol_ahead (bool, optional): True if symbol goes ahead of the
            value. False otherwise. Defaults to True.
    """
    __slots__ = []
    def __new__(  # pylint: disable=signature-differs,disable=unused-argument
            cls,
            amount: Union[int, float, Decimal],
            decimal_places: Optional[int] = 2,
            decimal_sign: Optional[str] = '.',
            grouping_places: Optional[int] = 3,
            grouping_sign: Optional[str] = ',',
            international: Optional[bool] = False,
            symbol_ahead: Optional[bool] = True,
            symbol_separator: Optional[str] = '',
            **other) -> 'IndianRupeeIN':
        """Class creator.
        Returns:
            IndianRupeeIN: new object.
        """
        return Currency.__new__(
            cls,
            amount=amount,
            alpha_code='INR',
            numeric_code='356',
            symbol='₹',
            symbol_separator=symbol_separator,
            symbol_ahead=symbol_ahead,
            localized_symbol='IN₹',
            decimal_places=decimal_places,
            decimal_sign=decimal_sign,
            grouping_places=grouping_places,
            grouping_sign=grouping_sign,
            convertion='',
            international=international)
class SriLankaRupee(Currency):
    """Sri Lanka Rupee currency representation.
    Simple usage example:
        >>> from multicurrency import SriLankaRupee
        >>> sri_lanka_rupee = SriLankaRupee(
        ...     amount=123456.789)
        >>> print(sri_lanka_rupee)
        රු. 123,456.79
    For more details see `multicurrency.currency.Currency` .
    Args:
        amount (Union[int, float, Decimal]): Represented value.
        decimal_places (int, optional): Number of decimal places for the
            currency representation. Defaults to 2.
        decimal_sign (str, optional): Decimal symbol. Defaults to '.'.
        grouping_places (int, optional): Number of digits for grouping.
            Defaults to 3.
        grouping_sign (str, optional): Grouping symbol. Defaults to ','.
        international (bool, optional): Identifies the currency using
            the 'currency' value instead of the 'symbol'. Defaults to
            False.
        symbol_separator (str, optional): Separation between the symbol
            and the value. Defaults to '\u00A0' (no-break space).
        symbol_ahead (bool, optional): True if symbol goes ahead of the
            value. False otherwise. Defaults to True.
    """
    __slots__ = []
    def __new__(  # pylint: disable=signature-differs,disable=unused-argument
            cls,
            amount: Union[int, float, Decimal],
            decimal_places: Optional[int] = 2,
            decimal_sign: Optional[str] = '.',
            grouping_places: Optional[int] = 3,
            grouping_sign: Optional[str] = ',',
            international: Optional[bool] = False,
            symbol_ahead: Optional[bool] = True,
            symbol_separator: Optional[str] = '\u00A0',
            **other) -> 'SriLankaRupee':
        """Class creator.
        Returns:
            SriLankaRupee: new object.
        """
        return Currency.__new__(
            cls,
            amount=amount,
            alpha_code='LKR',
            numeric_code='144',
            symbol='රු.',
            symbol_separator=symbol_separator,
            symbol_ahead=symbol_ahead,
            localized_symbol='රු.',
            decimal_places=decimal_places,
            decimal_sign=decimal_sign,
            grouping_places=grouping_places,
            grouping_sign=grouping_sign,
            convertion='',
            international=international)
class MauritiusRupee(Currency):
    """Mauritius Rupee currency representation.
    Simple usage example:
        >>> from multicurrency import MauritiusRupee
        >>> mauritius_rupee = MauritiusRupee(
        ...     amount=123456.789)
        >>> print(mauritius_rupee)
        ₨ 123,456.79
    For more details see `multicurrency.currency.Currency` .
    Args:
        amount (Union[int, float, Decimal]): Represented value.
        decimal_places (int, optional): Number of decimal places for the
            currency representation. Defaults to 2.
        decimal_sign (str, optional): Decimal symbol. Defaults to '.'.
        grouping_places (int, optional): Number of digits for grouping.
            Defaults to 3.
        grouping_sign (str, optional): Grouping symbol. Defaults to ','.
        international (bool, optional): Identifies the currency using
            the 'currency' value instead of the 'symbol'. Defaults to
            False.
        symbol_separator (str, optional): Separation between the symbol
            and the value. Defaults to '\u00A0' (no-break space).
        symbol_ahead (bool, optional): True if symbol goes ahead of the
            value. False otherwise. Defaults to True.
    """
    __slots__ = []
    def __new__(  # pylint: disable=signature-differs,disable=unused-argument
            cls,
            amount: Union[int, float, Decimal],
            decimal_places: Optional[int] = 2,
            decimal_sign: Optional[str] = '.',
            grouping_places: Optional[int] = 3,
            grouping_sign: Optional[str] = ',',
            international: Optional[bool] = False,
            symbol_ahead: Optional[bool] = True,
            symbol_separator: Optional[str] = '\u00A0',
            **other) -> 'MauritiusRupee':
        """Class creator.
        Returns:
            MauritiusRupee: new object.
        """
        return Currency.__new__(
            cls,
            amount=amount,
            alpha_code='MUR',
            numeric_code='480',
            symbol='₨',
            symbol_separator=symbol_separator,
            symbol_ahead=symbol_ahead,
            localized_symbol='₨',
            decimal_places=decimal_places,
            decimal_sign=decimal_sign,
            grouping_places=grouping_places,
            grouping_sign=grouping_sign,
            convertion='',
            international=international)
class NepaleseRupee(Currency):
    """Nepalese Rupee currency representation.
    Simple usage example:
        >>> from multicurrency import NepaleseRupee
        >>> nepalese_rupee = NepaleseRupee(
        ...     amount=123456.789)
        >>> print(nepalese_rupee)
        नेरू १२३,४५६.७९
    For more details see `multicurrency.currency.Currency` .
    Args:
        amount (Union[int, float, Decimal]): Represented value.
        decimal_places (int, optional): Number of decimal places for the
            currency representation. Defaults to 2.
        decimal_sign (str, optional): Decimal symbol. Defaults to '.'.
        grouping_places (int, optional): Number of digits for grouping.
            Defaults to 3.
        grouping_sign (str, optional): Grouping symbol. Defaults to ','.
        international (bool, optional): Identifies the currency using
            the 'currency' value instead of the 'symbol'. Defaults to
            False.
        symbol_separator (str, optional): Separation between the symbol
            and the value. Defaults to '\u00A0' (no-break space).
        symbol_ahead (bool, optional): True if symbol goes ahead of the
            value. False otherwise. Defaults to True.
    """
    __slots__ = []
    def __new__(  # pylint: disable=signature-differs,disable=unused-argument
            cls,
            amount: Union[int, float, Decimal],
            decimal_places: Optional[int] = 2,
            decimal_sign: Optional[str] = '.',
            grouping_places: Optional[int] = 3,
            grouping_sign: Optional[str] = ',',
            international: Optional[bool] = False,
            symbol_ahead: Optional[bool] = True,
            symbol_separator: Optional[str] = '\u00A0',
            **other) -> 'NepaleseRupee':
        """Class creator.
        Returns:
            NepaleseRupee: new object.
        """
        return Currency.__new__(
            cls,
            amount=amount,
            alpha_code='NPR',
            numeric_code='524',
            symbol='नेरू',
            symbol_separator=symbol_separator,
            symbol_ahead=symbol_ahead,
            localized_symbol='नेरू',
            decimal_places=decimal_places,
            decimal_sign=decimal_sign,
            grouping_places=grouping_places,
            grouping_sign=grouping_sign,
            # Devanagari digits ०-९ plus the minus sign, used to localize digits.
            convertion='०१२३४५६७८९-',
            international=international)
class PakistanRupee(Currency):
    """Pakistan Rupee currency representation.
    Simple usage example:
        >>> from multicurrency import PakistanRupee
        >>> pakistan_rupee = PakistanRupee(
        ...     amount=123456.789)
        >>> print(pakistan_rupee)
        ₨ 123,456.79
    For more details see `multicurrency.currency.Currency` .
    Args:
        amount (Union[int, float, Decimal]): Represented value.
        decimal_places (int, optional): Number of decimal places for the
            currency representation. Defaults to 2.
        decimal_sign (str, optional): Decimal symbol. Defaults to '.'.
        grouping_places (int, optional): Number of digits for grouping.
            Defaults to 3.
        grouping_sign (str, optional): Grouping symbol. Defaults to ','.
        international (bool, optional): Identifies the currency using
            the 'currency' value instead of the 'symbol'. Defaults to
            False.
        symbol_separator (str, optional): Separation between the symbol
            and the value. Defaults to '\u00A0' (no-break space).
        symbol_ahead (bool, optional): True if symbol goes ahead of the
            value. False otherwise. Defaults to True.
    """
    __slots__ = []
    def __new__(  # pylint: disable=signature-differs,disable=unused-argument
            cls,
            amount: Union[int, float, Decimal],
            decimal_places: Optional[int] = 2,
            decimal_sign: Optional[str] = '.',
            grouping_places: Optional[int] = 3,
            grouping_sign: Optional[str] = ',',
            international: Optional[bool] = False,
            symbol_ahead: Optional[bool] = True,
            symbol_separator: Optional[str] = '\u00A0',
            **other) -> 'PakistanRupee':
        """Class creator.
        Returns:
            PakistanRupee: new object.
        """
        return Currency.__new__(
            cls,
            amount=amount,
            alpha_code='PKR',
            numeric_code='586',
            symbol='₨',
            symbol_separator=symbol_separator,
            symbol_ahead=symbol_ahead,
            localized_symbol='₨',
            decimal_places=decimal_places,
            decimal_sign=decimal_sign,
            grouping_places=grouping_places,
            grouping_sign=grouping_sign,
            convertion='',
            international=international)
class SeychellesRupee(Currency):
    """Seychelles Rupee currency representation.
    Simple usage example:
        >>> from multicurrency import SeychellesRupee
        >>> seychelles_rupee = SeychellesRupee(
        ...     amount=123456.789)
        >>> print(seychelles_rupee)
        ₨ 123,456.79
    For more details see `multicurrency.currency.Currency` .
    Args:
        amount (Union[int, float, Decimal]): Represented value.
        decimal_places (int, optional): Number of decimal places for the
            currency representation. Defaults to 2.
        decimal_sign (str, optional): Decimal symbol. Defaults to '.'.
        grouping_places (int, optional): Number of digits for grouping.
            Defaults to 3.
        grouping_sign (str, optional): Grouping symbol. Defaults to ','.
        international (bool, optional): Identifies the currency using
            the 'currency' value instead of the 'symbol'. Defaults to
            False.
        symbol_separator (str, optional): Separation between the symbol
            and the value. Defaults to '\u00A0' (no-break space).
        symbol_ahead (bool, optional): True if symbol goes ahead of the
            value. False otherwise. Defaults to True.
    """
    __slots__ = []
    def __new__(  # pylint: disable=signature-differs,disable=unused-argument
            cls,
            amount: Union[int, float, Decimal],
            decimal_places: Optional[int] = 2,
            decimal_sign: Optional[str] = '.',
            grouping_places: Optional[int] = 3,
            grouping_sign: Optional[str] = ',',
            international: Optional[bool] = False,
            symbol_ahead: Optional[bool] = True,
            symbol_separator: Optional[str] = '\u00A0',
            **other) -> 'SeychellesRupee':
        """Class creator.
        Returns:
            SeychellesRupee: new object.
        """
        return Currency.__new__(
            cls,
            amount=amount,
            alpha_code='SCR',
            numeric_code='690',
            symbol='₨',
            symbol_separator=symbol_separator,
            symbol_ahead=symbol_ahead,
            localized_symbol='₨',
            decimal_places=decimal_places,
            decimal_sign=decimal_sign,
            grouping_places=grouping_places,
            grouping_sign=grouping_sign,
            convertion='',
            international=international)
| 36.300752
| 77
| 0.595795
| 1,914
| 19,312
| 5.855799
| 0.0721
| 0.049964
| 0.034261
| 0.027123
| 0.870004
| 0.868754
| 0.860457
| 0.860457
| 0.849215
| 0.812812
| 0
| 0.01695
| 0.3096
| 19,312
| 531
| 78
| 36.369115
| 0.820971
| 0.502019
| 0
| 0.837004
| 0
| 0
| 0.027988
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035242
| false
| 0
| 0.013216
| 0
| 0.154185
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c5e5b88e1ce1523a785665937a25b4744cedc2da
| 199
|
py
|
Python
|
tp/ui/__init__.py
|
chinapnr/agbot
|
9739ce1c2198e50111629db2d1de785edd06876e
|
[
"MIT"
] | 2
|
2018-06-23T06:48:46.000Z
|
2018-06-23T10:11:50.000Z
|
tp/ui/__init__.py
|
chinapnr/agbot
|
9739ce1c2198e50111629db2d1de785edd06876e
|
[
"MIT"
] | 5
|
2020-01-03T09:33:02.000Z
|
2021-06-02T00:49:52.000Z
|
tp/ui/__init__.py
|
chinapnr/agbot
|
9739ce1c2198e50111629db2d1de785edd06876e
|
[
"MIT"
] | 1
|
2021-07-07T07:17:27.000Z
|
2021-07-07T07:17:27.000Z
|
from agbot.core.model.context import VerticalContext
from .tp_ui import UiTestPoint
def run(tp_conf_dict, vertical_context: VerticalContext):
    """Build and return the UI test point for the given configuration.

    Args:
        tp_conf_dict: test-point configuration mapping, forwarded verbatim.
        vertical_context: the vertical context the test point runs in.

    Returns:
        UiTestPoint: a freshly constructed UI test point.
    """
    test_point = UiTestPoint(tp_conf_dict, vertical_context)
    return test_point
| 28.428571
| 57
| 0.834171
| 27
| 199
| 5.888889
| 0.592593
| 0.075472
| 0.125786
| 0.226415
| 0.314465
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105528
| 199
| 6
| 58
| 33.166667
| 0.893258
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
a8bc33b7fe19af62c72d2b01e82f7b230434cd26
| 37
|
py
|
Python
|
src/lib/sunau.py
|
DTenore/skulpt
|
098d20acfb088d6db85535132c324b7ac2f2d212
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
src/lib/sunau.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
src/lib/sunau.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
# Skulpt placeholder for the unimplemented stdlib `sunau` module.
# NOTE(review): `_sk_fail._("sunau")` presumably reports the module as
# unsupported (e.g. by raising an import error) — confirm against _sk_fail.
import _sk_fail; _sk_fail._("sunau")
| 18.5
| 36
| 0.756757
| 6
| 37
| 3.833333
| 0.666667
| 0.521739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081081
| 37
| 1
| 37
| 37
| 0.676471
| 0
| 0
| 0
| 0
| 0
| 0.135135
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7663ceebbed78990198de735e13f9f7add130be4
| 20,843
|
py
|
Python
|
archi/net.py
|
victor-estrade/SystGradDescent
|
822e7094290301ec47a99433381a8d6406798aff
|
[
"MIT"
] | 2
|
2019-03-20T09:05:02.000Z
|
2019-03-20T15:23:44.000Z
|
archi/net.py
|
victor-estrade/SystGradDescent
|
822e7094290301ec47a99433381a8d6406798aff
|
[
"MIT"
] | null | null | null |
archi/net.py
|
victor-estrade/SystGradDescent
|
822e7094290301ec47a99433381a8d6406798aff
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import torch
import torch.nn as nn
from . import layers
from .blocks import ResidualAverageBlock
from .blocks import ResidualBlock
from .blocks import softmax_cat
class BaseArchi(nn.Module):
def __init__(self, n_unit=80):
super().__init__()
self.name = "{}x{:d}".format(self.__class__.__name__, n_unit)
"""
Fix residual net initialization
https://openreview.net/forum?id=H1gsz30cKX
Or use batch norm ?
---
Note :
E = Extra input
L = Linear
A = Average layer
S = Sum layer
M = Mean operation
R = Residual block
AR = Average Residual block
"""
class F6(BaseArchi):
def __init__(self, n_in=1, n_out=1, n_unit=80):
super().__init__(n_unit)
self.fc_in = nn.Linear(n_in, n_unit)
self.fc1 = nn.Linear(n_unit, n_unit)
self.fc2 = nn.Linear(n_unit, n_unit)
self.fc3 = nn.Linear(n_unit, n_unit)
self.fc4 = nn.Linear(n_unit, n_unit)
self.fc_out = nn.Linear(n_unit, n_out)
def forward(self, x):
x = self.fc_in(x)
x = torch.relu(x)
x = self.fc1(x)
x = torch.relu(x)
x = self.fc2(x)
x = torch.relu(x)
x = self.fc3(x)
x = torch.relu(x)
x = self.fc4(x)
x = torch.relu(x)
x = self.fc_out(x)
return x
def reset_parameters(self):
self.fc_in.reset_parameters()
self.fc1.reset_parameters()
self.fc2.reset_parameters()
self.fc3.reset_parameters()
self.fc4.reset_parameters()
self.fc_out.reset_parameters()
class RegNet(BaseArchi):
def __init__(self, n_in=1, n_out=1):
N_UNITS = 80
super().__init__(N_UNITS)
self.avg1 = layers.Average(n_in, N_UNITS)
self.avg2 = layers.Average(N_UNITS, N_UNITS)
self.avg3 = layers.Average(N_UNITS, N_UNITS)
self.fc1 = nn.Linear(N_UNITS, N_UNITS)
self.fc2 = nn.Linear(N_UNITS, N_UNITS)
self.fc3 = nn.Linear(N_UNITS*2, N_UNITS)
self.fc_out = nn.Linear(N_UNITS, n_out)
def forward(self, x, w, p=None):
x = self.avg1(x, w)
x = torch.relu(x)
# x = self.fc1(x)
# x = torch.softmax(x, 1)
x = self.avg2(x, w)
x = torch.relu(x)
x_ = self.fc2(x)
x_ = torch.softmax(x_, 1)
x_ = self.avg3(x_, w)
x_ = torch.relu(x_)
x = torch.cat((x, x_), 1)
x = layers.torch_weighted_mean(x, w, 0, keepdim=False)
x = self.fc3(x)
x = torch.relu(x)
x = self.fc_out(x)
return x
def reset_parameters(self):
self.avg1.reset_parameters()
self.avg2.reset_parameters()
self.avg3.reset_parameters()
self.fc1.reset_parameters()
self.fc2.reset_parameters()
self.fc3.reset_parameters()
self.fc_out.reset_parameters()
class RegNetExtra(BaseArchi):
def __init__(self, n_in=1, n_out=1, n_extra=0, n_unit=80):
super().__init__(n_unit)
self.avg1 = layers.AverageExtra(n_in, n_unit, n_extra)
self.avg2 = layers.Average(n_unit, n_unit)
self.avg3 = layers.Average(n_unit, n_unit)
self.fc1 = nn.Linear(n_unit, n_unit)
self.fc2 = nn.Linear(n_unit, n_unit)
self.fc3 = nn.Linear(n_unit*2, n_unit)
self.fc_out = nn.Linear(n_unit, n_out)
def forward(self, x, w, p):
x = self.avg1(x, w, p)
x = torch.nn.functional.relu6(x)
# x = self.fc1(x)
# x = torch.softmax(x, 1)
x = self.avg2(x, w)
x = torch.nn.functional.relu6(x)
x_ = self.fc2(x)
x_ = torch.softmax(x_, 1)
x_ = self.avg3(x_, w)
x_ = torch.nn.functional.relu6(x_)
x = torch.cat((x, x_), 1)
x = layers.torch_weighted_mean(x, w, 0, keepdim=False)
x = self.fc3(x)
x = torch.nn.functional.relu6(x)
x = self.fc_out(x)
return x
def reset_parameters(self):
self.avg1.reset_parameters()
self.avg2.reset_parameters()
self.avg3.reset_parameters()
self.fc1.reset_parameters()
self.fc2.reset_parameters()
self.fc3.reset_parameters()
self.fc_out.reset_parameters()
class AR9R1(BaseArchi):
def __init__(self, n_in=1, n_out=1, n_unit=80):
super().__init__(n_unit)
self.avg_in = layers.Average(n_in, n_unit)
self.avg1 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg2 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg3 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg4 = ResidualAverageBlock(n_unit, n_unit//2)
self.fc_out = nn.Linear(n_unit, n_out)
def forward(self, x, w, p=None):
x = self.avg_in(x, w)
x = self.avg1(x, w)
x = self.avg2(x, w)
x = self.avg3(x, w)
x = self.avg4(x, w)
x = layers.torch_weighted_mean(x, w, 0, keepdim=False)
x = self.fc_out(x)
return x
def reset_parameters(self):
self.avg_in.reset_parameters()
self.avg1.reset_parameters()
self.avg2.reset_parameters()
self.avg3.reset_parameters()
self.avg4.reset_parameters()
self.fc_out.reset_parameters()
class AR5R5(BaseArchi):
def __init__(self, n_in=1, n_out=1, n_unit=80):
super().__init__(n_unit)
self.avg_in = layers.Average(n_in, n_unit)
self.avg1 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg2 = ResidualAverageBlock(n_unit, n_unit//2)
self.res3 = ResidualBlock (n_unit, n_unit//2)
self.res4 = ResidualBlock (n_unit, n_unit//2)
self.fc_out = nn.Linear(n_unit, n_out)
def forward(self, x, w, p=None):
x = self.avg_in(x, w)
x = self.avg1(x, w)
x = self.avg2(x, w)
x = layers.torch_weighted_mean(x, w, 0, keepdim=False)
x = self.res3(x)
x = self.res4(x)
x = layers.relu6_tanh(x)
x = self.fc_out(x)
return x
def reset_parameters(self):
self.avg_in.reset_parameters()
self.avg1.reset_parameters()
self.avg2.reset_parameters()
self.res3.reset_parameters()
self.res4.reset_parameters()
self.fc_out.reset_parameters()
class AR5R5E(BaseArchi):
def __init__(self, n_in=1, n_out=1, n_extra=0, n_unit=80):
super().__init__(n_unit)
self.avg_in = layers.AverageExtra(n_in, n_unit, n_extra)
self.avg1 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg2 = ResidualAverageBlock(n_unit, n_unit//2)
self.res3 = ResidualBlock (n_unit, n_unit//2)
self.res4 = ResidualBlock (n_unit, n_unit//2)
self.fc_out = nn.Linear(n_unit, n_out)
def forward(self, x, w, p):
x = self.avg_in(x, w, p)
x = self.avg1(x, w)
x = self.avg2(x, w)
x = layers.torch_weighted_mean(x, w, 0, keepdim=False)
x = self.res3(x)
x = self.res4(x)
x = layers.relu6_tanh(x)
x = self.fc_out(x)
return x
def reset_parameters(self):
self.avg_in.reset_parameters()
self.avg1.reset_parameters()
self.avg2.reset_parameters()
self.res3.reset_parameters()
self.res4.reset_parameters()
self.fc_out.reset_parameters()
class AR9R9E(BaseArchi):
def __init__(self, n_in=1, n_out=1, n_extra=0, n_unit=80):
super().__init__(n_unit)
self.avg_in = layers.AverageExtra(n_in, n_unit, n_extra)
self.avg1 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg2 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg3 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg4 = ResidualAverageBlock(n_unit, n_unit//2)
self.res5 = ResidualBlock (n_unit, n_unit//2)
self.res6 = ResidualBlock (n_unit, n_unit//2)
self.res7 = ResidualBlock (n_unit, n_unit//2)
self.res8 = ResidualBlock (n_unit, n_unit//2)
self.fc_out = nn.Linear(n_unit, n_out)
def forward(self, x, w, p):
x = self.avg_in(x, w, p)
x = self.avg1(x, w)
x = self.avg2(x, w)
x = self.avg3(x, w)
x = self.avg4(x, w)
x = layers.torch_weighted_mean(x, w, 0, keepdim=False)
x = self.res5(x)
x = self.res6(x)
x = self.res7(x)
x = self.res8(x)
x = layers.relu_tanh(x)
x = self.fc_out(x)
return x
def reset_parameters(self):
self.avg_in.reset_parameters()
self.avg1.reset_parameters()
self.avg2.reset_parameters()
self.avg3.reset_parameters()
self.avg4.reset_parameters()
self.res5.reset_parameters()
self.res6.reset_parameters()
self.res7.reset_parameters()
self.res8.reset_parameters()
self.fc_out.reset_parameters()
class AR9R9(BaseArchi):
def __init__(self, n_in=1, n_out=1, n_extra=0, n_unit=80):
super().__init__(n_unit)
self.avg_in = layers.Average(n_in, n_unit)
self.avg1 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg2 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg3 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg4 = ResidualAverageBlock(n_unit, n_unit//2)
self.res5 = ResidualBlock (n_unit, n_unit//2)
self.res6 = ResidualBlock (n_unit, n_unit//2)
self.res7 = ResidualBlock (n_unit, n_unit//2)
self.res8 = ResidualBlock (n_unit, n_unit//2)
self.fc_out = nn.Linear(n_unit, n_out)
def forward(self, x, w, p):
x = self.avg_in(x, w)
x = self.avg1(x, w)
x = self.avg2(x, w)
x = self.avg3(x, w)
x = self.avg4(x, w)
x = layers.torch_weighted_mean(x, w, 0, keepdim=False)
x = self.res5(x)
x = self.res6(x)
x = self.res7(x)
x = self.res8(x)
x = layers.relu_tanh(x)
x = self.fc_out(x)
return x
def reset_parameters(self):
self.avg_in.reset_parameters()
self.avg1.reset_parameters()
self.avg2.reset_parameters()
self.avg3.reset_parameters()
self.avg4.reset_parameters()
self.res5.reset_parameters()
self.res6.reset_parameters()
self.res7.reset_parameters()
self.res8.reset_parameters()
self.fc_out.reset_parameters()
class AR19R5E(BaseArchi):
def __init__(self, n_in=1, n_out=1, n_extra=0, n_unit=80):
super().__init__(n_unit)
self.avg_in = layers.AverageExtra(n_in, n_unit, n_extra)
self.avg1 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg2 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg3 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg4 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg5 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg6 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg7 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg8 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg9 = ResidualAverageBlock(n_unit, n_unit//2)
self.res10 = ResidualBlock (n_unit, n_unit//2)
self.res11 = ResidualBlock (n_unit, n_unit//2)
self.fc_out = nn.Linear(n_unit, n_out)
def forward(self, x, w, p):
x = self.avg_in(x, w, p)
x = self.avg1(x, w)
x = self.avg2(x, w)
x = self.avg3(x, w)
x = self.avg4(x, w)
x = self.avg5(x, w)
x = self.avg6(x, w)
x = self.avg7(x, w)
x = self.avg8(x, w)
x = self.avg9(x, w)
x = layers.torch_weighted_mean(x, w, 0, keepdim=False)
x = self.res10(x)
x = self.res11(x)
x = layers.relu_tanh(x)
x = self.fc_out(x)
return x
def reset_parameters(self):
self.avg_in.reset_parameters()
self.avg1.reset_parameters()
self.avg2.reset_parameters()
self.avg3.reset_parameters()
self.avg4.reset_parameters()
self.avg5.reset_parameters()
self.avg6.reset_parameters()
self.avg7.reset_parameters()
self.avg8.reset_parameters()
self.avg9.reset_parameters()
self.res10.reset_parameters()
self.res11.reset_parameters()
self.fc_out.reset_parameters()
class AR19R5(BaseArchi):
def __init__(self, n_in=1, n_out=1, n_extra=0, n_unit=80):
super().__init__(n_unit)
self.avg_in = layers.Average(n_in, n_unit)
self.avg1 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg2 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg3 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg4 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg5 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg6 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg7 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg8 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg9 = ResidualAverageBlock(n_unit, n_unit//2)
self.res10 = ResidualBlock (n_unit, n_unit//2)
self.res11 = ResidualBlock (n_unit, n_unit//2)
self.fc_out = nn.Linear(n_unit, n_out)
def forward(self, x, w, p):
x = self.avg_in(x, w)
x = self.avg1(x, w)
x = self.avg2(x, w)
x = self.avg3(x, w)
x = self.avg4(x, w)
x = self.avg5(x, w)
x = self.avg6(x, w)
x = self.avg7(x, w)
x = self.avg8(x, w)
x = self.avg9(x, w)
x = layers.torch_weighted_mean(x, w, 0, keepdim=False)
x = self.res10(x)
x = self.res11(x)
x = layers.relu_tanh(x)
x = self.fc_out(x)
return x
def reset_parameters(self):
self.avg_in.reset_parameters()
self.avg1.reset_parameters()
self.avg2.reset_parameters()
self.avg3.reset_parameters()
self.avg4.reset_parameters()
self.avg5.reset_parameters()
self.avg6.reset_parameters()
self.avg7.reset_parameters()
self.avg8.reset_parameters()
self.avg9.reset_parameters()
self.res10.reset_parameters()
self.res11.reset_parameters()
self.fc_out.reset_parameters()
class AF3R3(BaseArchi):
def __init__(self, n_in=1, n_out=1, n_unit=80):
super().__init__(n_unit)
self.avg_in = layers.Average(n_in, n_unit)
self.avg1 = layers.Average(n_unit, n_unit)
self.avg2 = layers.Average(n_unit, n_unit)
self.fc3 = nn.Linear(n_unit, n_unit)
self.fc4 = nn.Linear(n_unit, n_unit)
self.fc_out = nn.Linear(n_unit, n_out)
def forward(self, x, w, p=None):
x = self.avg_in(x, w)
x = torch.relu(x)
x = self.avg1(x, w)
x = torch.relu(x)
x = self.avg2(x, w)
x = layers.torch_weighted_mean(x, w, 0, keepdim=False)
x = torch.relu(x)
x = self.fc3(x)
x = torch.relu(x)
x = self.fc4(x)
x = torch.relu(x)
x = self.fc_out(x)
return x
def reset_parameters(self):
self.avg_in.reset_parameters()
self.avg1.reset_parameters()
self.avg2.reset_parameters()
self.fc3.reset_parameters()
self.fc4.reset_parameters()
self.fc_out.reset_parameters()
class AF3R3E(BaseArchi):
def __init__(self, n_in=1, n_out=1, n_extra=0, n_unit=80):
super().__init__(n_unit)
self.avg_in = layers.AverageExtra(n_in, n_unit, n_extra)
self.avg1 = layers.Average(n_unit, n_unit)
self.avg2 = layers.Average(n_unit, n_unit)
self.fc3 = nn.Linear(n_unit, n_unit)
self.fc4 = nn.Linear(n_unit, n_unit)
self.fc_out = nn.Linear(n_unit, n_out)
def forward(self, x, w, p):
x = self.avg_in(x, w, p)
x = torch.relu(x)
x = self.avg1(x, w)
x = torch.relu(x)
x = self.avg2(x, w)
x = layers.torch_weighted_mean(x, w, 0, keepdim=False)
x = torch.relu(x)
x = self.fc3(x)
x = torch.relu(x)
x = self.fc4(x)
x = torch.relu(x)
x = self.fc_out(x)
return x
def reset_parameters(self):
self.avg_in.reset_parameters()
self.avg1.reset_parameters()
self.avg2.reset_parameters()
self.fc3.reset_parameters()
self.fc4.reset_parameters()
self.fc_out.reset_parameters()
class F3R3(BaseArchi):
def __init__(self, n_in=1, n_out=1, n_unit=80):
super().__init__(n_unit)
self.fc_in = nn.Linear(n_in, n_unit)
self.fc1 = nn.Linear(n_unit, n_unit)
self.fc2 = nn.Linear(n_unit, n_unit)
self.fc3 = nn.Linear(n_unit, n_unit)
self.fc4 = nn.Linear(n_unit, n_unit)
self.fc_out = nn.Linear(n_unit, n_out)
def forward(self, x, w, p=None):
x = self.fc_in(x)
x = torch.relu(x)
x = self.fc1(x)
x = torch.relu(x)
x = self.fc2(x)
x = layers.torch_weighted_mean(x, w, 0, keepdim=False)
x = torch.relu(x)
x = self.fc3(x)
x = torch.relu(x)
x = self.fc4(x)
x = torch.relu(x)
x = self.fc_out(x)
return x
def reset_parameters(self):
self.fc_in.reset_parameters()
self.fc1.reset_parameters()
self.fc2.reset_parameters()
self.fc3.reset_parameters()
self.fc4.reset_parameters()
self.fc_out.reset_parameters()
class F3R3E(BaseArchi):
def __init__(self, n_in=1, n_out=1, n_unit=80):
super().__init__(n_unit)
self.fc_in = nn.Linear(n_in, n_unit)
self.fc1 = nn.Linear(n_unit, n_unit)
self.fc2 = nn.Linear(n_unit, n_unit)
self.fc3 = nn.Linear(n_unit, n_unit)
self.fc4 = nn.Linear(n_unit, n_unit)
self.fc_out = nn.Linear(n_unit, n_out)
def forward(self, x, w, p):
x = self.fc_in(x)
x = torch.relu(x)
x = self.fc1(x)
x = torch.relu(x)
x = self.fc2(x)
x = layers.torch_weighted_mean(x, w, 0, keepdim=False)
x = torch.cat((x, p), 1)
x = torch.relu(x)
x = self.fc3(x)
x = torch.relu(x)
x = self.fc4(x)
x = torch.relu(x)
x = self.fc_out(x)
return x
def reset_parameters(self):
self.fc_in.reset_parameters()
self.fc1.reset_parameters()
self.fc2.reset_parameters()
self.fc3.reset_parameters()
self.fc4.reset_parameters()
self.fc_out.reset_parameters()
class AR5S2S2R1(BaseArchi):
def __init__(self, n_in=1, n_out=1, n_unit=80):
super().__init__(n_unit)
self.avg_in = layers.Average(n_in, n_unit)
self.avg1 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg2 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg3 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg4 = ResidualAverageBlock(n_unit+n_unit, n_unit//2)
self.fc_out = nn.Linear(n_unit+n_unit, n_out)
def forward(self, x, w, p=None):
x = self.avg_in(x, w)
x = self.avg1(x, w)
x = self.avg2(x, w)
x_ = self.avg3(x, w)
x = softmax_cat(x_, x)
x = self.avg4(x, w)
x = layers.torch_weighted_mean(x, w, 0, keepdim=False)
x = self.fc_out(x)
return x
def reset_parameters(self):
self.avg_in.reset_parameters()
self.avg1.reset_parameters()
self.avg2.reset_parameters()
self.avg3.reset_parameters()
self.avg4.reset_parameters()
self.fc_out.reset_parameters()
class AR3S2S2R3(BaseArchi):
def __init__(self, n_in=1, n_out=1, n_unit=200):
super().__init__(n_unit)
self.avg_in = layers.Average(n_in, n_unit)
self.avg1 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg2 = ResidualAverageBlock(n_unit, n_unit//2)
self.avg3 = ResidualAverageBlock(n_unit+n_unit, n_unit//2)
self.res4 = ResidualBlock (n_unit+n_unit, n_unit//2)
self.fc_out = nn.Linear(n_unit+n_unit, n_out)
def forward(self, x, w, p=None):
x = self.avg_in(x)
x = self.avg1(x, w)
x_ = self.avg2(x, w)
x = softmax_cat(x_, x)
x = self.avg3(x, w)
x = layers.torch_weighted_mean(x, w, 0, keepdim=False)
x = self.res4(x)
x = self.fc_out(x)
return x
def reset_parameters(self):
self.avg_in.reset_parameters()
self.avg1.reset_parameters()
self.avg2.reset_parameters()
self.avg3.reset_parameters()
self.avg4.reset_parameters()
self.fc_out.reset_parameters()
| 32.165123
| 69
| 0.587919
| 3,091
| 20,843
| 3.714979
| 0.040764
| 0.101454
| 0.198554
| 0.075764
| 0.943569
| 0.935818
| 0.933902
| 0.922755
| 0.918749
| 0.907428
| 0
| 0.032269
| 0.284844
| 20,843
| 647
| 70
| 32.214838
| 0.738092
| 0.004462
| 0
| 0.896226
| 0
| 0
| 0.000341
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.092453
| false
| 0
| 0.018868
| 0
| 0.173585
| 0.001887
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4f3947cf1c8fb52e14e89525535201fe31cac395
| 13,847
|
py
|
Python
|
memsource_cli/api/translation_api.py
|
zerodayz/memsource-cli-client
|
c2574f1467539a49e6637c874e88d75c7ef789b3
|
[
"Apache-2.0"
] | 1
|
2020-07-24T16:29:32.000Z
|
2020-07-24T16:29:32.000Z
|
memsource_cli/api/translation_api.py
|
zerodayz/memsource-cli-client
|
c2574f1467539a49e6637c874e88d75c7ef789b3
|
[
"Apache-2.0"
] | null | null | null |
memsource_cli/api/translation_api.py
|
zerodayz/memsource-cli-client
|
c2574f1467539a49e6637c874e88d75c7ef789b3
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Memsource REST API
Welcome to Memsource's API documentation. To view our legacy APIs please [visit our documentation](https://wiki.memsource.com/wiki/Memsource_API) and for more information about our new APIs, [visit our blog](https://www.memsource.com/blog/2017/10/24/introducing-rest-apis-qa-with-the-memsource-api-team/). If you have any questions, please contact [Memsource Support](<mailto:support@memsource.com>). # noqa: E501
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from memsource_cli.api_client import ApiClient
class TranslationApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def human_translate(self, project_uid, **kwargs): # noqa: E501
"""Human translate (Gengo or Unbabel) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.human_translate(project_uid, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str project_uid: (required)
:param HumanTranslateJobsDto body:
:return: AsyncRequestWrapperDto
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.human_translate_with_http_info(project_uid, **kwargs) # noqa: E501
else:
(data) = self.human_translate_with_http_info(project_uid, **kwargs) # noqa: E501
return data
def human_translate_with_http_info(self, project_uid, **kwargs): # noqa: E501
"""Human translate (Gengo or Unbabel) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.human_translate_with_http_info(project_uid, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str project_uid: (required)
:param HumanTranslateJobsDto body:
:return: AsyncRequestWrapperDto
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['project_uid', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method human_translate" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'project_uid' is set
if ('project_uid' not in params or
params['project_uid'] is None):
raise ValueError("Missing the required parameter `project_uid` when calling `human_translate`") # noqa: E501
collection_formats = {}
path_params = {}
if 'project_uid' in params:
path_params['projectUid'] = params['project_uid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api2/v1/projects/{projectUid}/jobs/humanTranslate', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AsyncRequestWrapperDto', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def machine_translation_job(self, project_uid, job_uid, **kwargs): # noqa: E501
"""Translate using machine translation # noqa: E501
Configured machine translate settings is used # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.machine_translation_job(project_uid, job_uid, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str project_uid: (required)
:param str job_uid: (required)
:param TranslationRequestDto body:
:return: MachineTranslateResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.machine_translation_job_with_http_info(project_uid, job_uid, **kwargs) # noqa: E501
else:
(data) = self.machine_translation_job_with_http_info(project_uid, job_uid, **kwargs) # noqa: E501
return data
def machine_translation_job_with_http_info(self, project_uid, job_uid, **kwargs): # noqa: E501
"""Translate using machine translation # noqa: E501
Configured machine translate settings is used # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.machine_translation_job_with_http_info(project_uid, job_uid, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str project_uid: (required)
:param str job_uid: (required)
:param TranslationRequestDto body:
:return: MachineTranslateResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['project_uid', 'job_uid', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method machine_translation_job" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'project_uid' is set
if ('project_uid' not in params or
params['project_uid'] is None):
raise ValueError("Missing the required parameter `project_uid` when calling `machine_translation_job`") # noqa: E501
# verify the required parameter 'job_uid' is set
if ('job_uid' not in params or
params['job_uid'] is None):
raise ValueError("Missing the required parameter `job_uid` when calling `machine_translation_job`") # noqa: E501
collection_formats = {}
path_params = {}
if 'project_uid' in params:
path_params['projectUid'] = params['project_uid'] # noqa: E501
if 'job_uid' in params:
path_params['jobUid'] = params['job_uid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api2/v1/projects/{projectUid}/jobs/{jobUid}/translations/translateWithMachineTranslation', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='MachineTranslateResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def pre_translate(self, project_uid, **kwargs): # noqa: E501
"""Pre-translate job # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.pre_translate(project_uid, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str project_uid: (required)
:param PreTranslateJobsDto body:
:return: AsyncRequestWrapperDto
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.pre_translate_with_http_info(project_uid, **kwargs) # noqa: E501
else:
(data) = self.pre_translate_with_http_info(project_uid, **kwargs) # noqa: E501
return data
def pre_translate_with_http_info(self, project_uid, **kwargs): # noqa: E501
"""Pre-translate job # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.pre_translate_with_http_info(project_uid, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str project_uid: (required)
:param PreTranslateJobsDto body:
:return: AsyncRequestWrapperDto
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['project_uid', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method pre_translate" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'project_uid' is set
if ('project_uid' not in params or
params['project_uid'] is None):
raise ValueError("Missing the required parameter `project_uid` when calling `pre_translate`") # noqa: E501
collection_formats = {}
path_params = {}
if 'project_uid' in params:
path_params['projectUid'] = params['project_uid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api2/v1/projects/{projectUid}/jobs/preTranslate', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AsyncRequestWrapperDto', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 39.338068
| 421
| 0.620568
| 1,572
| 13,847
| 5.221374
| 0.124046
| 0.049708
| 0.019006
| 0.024854
| 0.887305
| 0.880239
| 0.873538
| 0.859527
| 0.85173
| 0.845517
| 0
| 0.017549
| 0.288077
| 13,847
| 351
| 422
| 39.450142
| 0.815074
| 0.339713
| 0
| 0.78022
| 0
| 0
| 0.202828
| 0.0625
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038462
| false
| 0
| 0.021978
| 0
| 0.115385
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4f93be45d400a92a989c773a2cae0f4e98838035
| 165
|
py
|
Python
|
src/auction_api/models/__init__.py
|
4heck/auction_backend
|
c9568d45f5f4873f71ae71ced72b8e5a0a02d273
|
[
"MIT"
] | null | null | null |
src/auction_api/models/__init__.py
|
4heck/auction_backend
|
c9568d45f5f4873f71ae71ced72b8e5a0a02d273
|
[
"MIT"
] | null | null | null |
src/auction_api/models/__init__.py
|
4heck/auction_backend
|
c9568d45f5f4873f71ae71ced72b8e5a0a02d273
|
[
"MIT"
] | null | null | null |
from auction_api.models.user import User
from auction_api.models.auction import Auction
from auction_api.models.bid import Bid
__all__ = ["User", "Auction", "Bid"]
| 27.5
| 46
| 0.787879
| 25
| 165
| 4.92
| 0.32
| 0.268293
| 0.341463
| 0.487805
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109091
| 165
| 5
| 47
| 33
| 0.836735
| 0
| 0
| 0
| 0
| 0
| 0.084848
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
96ce068c86f6f860bcc65a2d6132941a759a8f00
| 7,331
|
py
|
Python
|
model_scripts/pickled_app.py
|
beilmanmich/cap_june_2017
|
7bd4bc56e3d610e5919e8259d0d0cb6f2af06d9d
|
[
"CC-BY-3.0"
] | null | null | null |
model_scripts/pickled_app.py
|
beilmanmich/cap_june_2017
|
7bd4bc56e3d610e5919e8259d0d0cb6f2af06d9d
|
[
"CC-BY-3.0"
] | null | null | null |
model_scripts/pickled_app.py
|
beilmanmich/cap_june_2017
|
7bd4bc56e3d610e5919e8259d0d0cb6f2af06d9d
|
[
"CC-BY-3.0"
] | null | null | null |
import flask
import numpy as np
import pandas as pd
import pickle
from sklearn.tree import DecisionTreeClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.neighbors import KDTree
import os
#---------- MODEL IN MEMORY ----------------#
# ---------- MODEL SETUP (runs once at import) ----------
# The 57 model features.  Declared ONCE and reused for the training matrix,
# the nearest-neighbour lookup table and the KDTree, instead of repeating
# the same list verbatim three times as before.
collist = [
    'school_previous_projects',
    'teacher_previous_projects',
    'month',
    'log_price_including',
    'sqrt_students_reached',
    'price_per_student',
    'total_state_donors',
    'total_state_projects',
    'state_avg_donors',
    'primary_focus_subject_Applied Sciences',
    'primary_focus_subject_Character Education',
    'primary_focus_subject_Civics & Government',
    'primary_focus_subject_College & Career Prep',
    'primary_focus_subject_Community Service',
    'primary_focus_subject_ESL',
    'primary_focus_subject_Early Development',
    'primary_focus_subject_Economics',
    'primary_focus_subject_Environmental Science',
    'primary_focus_subject_Extracurricular',
    'primary_focus_subject_Financial Literacy',
    'primary_focus_subject_Foreign Languages',
    'primary_focus_subject_Gym & Fitness',
    'primary_focus_subject_Health & Life Science',
    'primary_focus_subject_Health & Wellness',
    'primary_focus_subject_History & Geography',
    'primary_focus_subject_Literacy',
    'primary_focus_subject_Literature & Writing',
    'primary_focus_subject_Mathematics',
    'primary_focus_subject_Music',
    'primary_focus_subject_Nutrition',
    'primary_focus_subject_Other',
    'primary_focus_subject_Parent Involvement',
    'primary_focus_subject_Performing Arts',
    'primary_focus_subject_Social Sciences',
    'primary_focus_subject_Special Needs',
    'primary_focus_subject_Team Sports',
    'primary_focus_subject_Visual Arts',
    'poverty_level_high poverty',
    'poverty_level_highest poverty',
    'poverty_level_low poverty',
    'poverty_level_moderate poverty',
    'grade_level_Grades 3-5',
    'grade_level_Grades 6-8',
    'grade_level_Grades 9-12',
    'grade_level_Grades PreK-2',
    'school_metro_rural',
    'school_metro_suburban',
    'school_metro_urban',
    'resource_type_Books',
    'resource_type_Other',
    'resource_type_Supplies',
    'resource_type_Technology',
    'resource_type_Trips',
    'resource_type_Visitors',
    'teacher_teach_for_america',
    'optional_support',
]

# Use context managers so the pickle file handles are closed promptly
# (the original bare open() calls were never closed).
# NOTE(review): pickle.load on these files assumes they are trusted,
# locally produced artifacts — never load pickles from untrusted sources.
with open('static/dummied_recent_data.pkl', 'rb') as _fh:
    df = pd.DataFrame(pickle.load(_fh))
with open('static/short_essays.pkl', 'rb') as _fh:
    essay_df = pd.DataFrame(pickle.load(_fh))

# Training matrix and binary response vector.
X = df[collist]
Y = np.ravel(df[['RESP']])
PREDICTOR = LogisticRegression().fit(X, Y)

# `lookup` keeps the project id alongside the features so a KDTree hit can
# be mapped back to the matching project essay.
lookup = df[['_projectid'] + collist]

# BUG FIX: the tree must be built on the feature *columns* of `lookup`
# (lookup[collist]); the original indexed with the DataFrame X itself
# (lookup[X]), which does not select columns.
tree = KDTree(lookup[collist], metric="chebyshev")
#---------- URLS AND WEB PAGES -------------#
app = flask.Flask(__name__)


@app.route("/")
def viz_page():
    """Serve the static single-page front end for the prediction UI."""
    with open("dc_prediction.html", 'r') as html_file:
        return html_file.read()
@app.route("/score", methods=["POST"])
def score():
    """Score a posted feature vector and return the nearest project's essay.

    Expects JSON of the form {"example": [...feature values...]} with the
    features in the same order as the model's training columns.  Returns
    JSON with the predicted success probability, the essay text of the
    most similar historical project, and that essay's character count.
    """
    data = flask.request.json
    # Both predict_proba and KDTree.query expect a 2-D (1, n_features)
    # array; np.matrix (used originally) is discouraged by NumPy.
    x = np.asarray(data["example"]).reshape(1, -1)
    score = PREDICTOR.predict_proba(x)
    # Nearest historical project under the tree's Chebyshev metric.
    dist, ind = tree.query(x, k=1)
    new = lookup.reset_index(drop=True)
    # BUG FIX: DataFrame.ix was removed in pandas 1.0 — use .iloc for the
    # positional index returned by KDTree.query.
    proj_id = new.iloc[ind[0][0]]['_projectid']
    essay_text = pd.DataFrame(essay_df['essay'].loc[essay_df['_projectid'] == proj_id])
    text = essay_text['essay'].values[0]
    user_readability = len(text)
    results = {"score": score[0][1], "project_text": text, "user_readability": user_readability}
    return flask.jsonify(results)
@app.route("/grade_essay", methods=["POST"])
def grade_essay():
    """Return a crude readability proxy: the essay's character count."""
    posted = flask.request.json
    return flask.jsonify({"readability": len(posted["essay"])})
| 32.438053
| 97
| 0.790615
| 910
| 7,331
| 5.876923
| 0.216484
| 0.188482
| 0.298429
| 0.030292
| 0.810583
| 0.810583
| 0.810583
| 0.798242
| 0.798242
| 0.798242
| 0
| 0.004667
| 0.093848
| 7,331
| 225
| 98
| 32.582222
| 0.800391
| 0.023053
| 0
| 0.813397
| 0
| 0
| 0.705044
| 0.447953
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014354
| false
| 0
| 0.038278
| 0
| 0.066986
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
96dfa8bbf7d2627d47fe30d7eba05510fc81666f
| 128,679
|
py
|
Python
|
msgraph-cli-extensions/beta/sites_beta/azext_sites_beta/vendored_sdks/sites/operations/_sites_operations.py
|
thewahome/msgraph-cli
|
33127d9efa23a0e5f5303c93242fbdbb73348671
|
[
"MIT"
] | null | null | null |
msgraph-cli-extensions/beta/sites_beta/azext_sites_beta/vendored_sdks/sites/operations/_sites_operations.py
|
thewahome/msgraph-cli
|
33127d9efa23a0e5f5303c93242fbdbb73348671
|
[
"MIT"
] | null | null | null |
msgraph-cli-extensions/beta/sites_beta/azext_sites_beta/vendored_sdks/sites/operations/_sites_operations.py
|
thewahome/msgraph-cli
|
33127d9efa23a0e5f5303c93242fbdbb73348671
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models
if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union

    T = TypeVar('T')
    # Signature of the optional `cls` response-transform callback accepted by
    # every operation: (raw pipeline response, deserialized body, headers) -> Any.
    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class SitesOperations(object):
"""SitesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~sites.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer):
    """Store the shared pipeline client, configuration and (de)serializers."""
    self._client = client
    self._config = config
    self._serialize = serializer
    self._deserialize = deserializer
def get_analytics(
    self,
    site_id,  # type: str
    select=None,  # type: Optional[List[Union[str, "models.Enum129"]]]
    expand=None,  # type: Optional[List[Union[str, "models.Enum130"]]]
    **kwargs  # type: Any
):
    # type: (...) -> "models.MicrosoftGraphItemAnalytics"
    """Get analytics from sites.

    Get analytics from sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param select: Select properties to be returned.
    :type select: list[str or ~sites.models.Enum129]
    :param expand: Expand related entities.
    :type expand: list[str or ~sites.models.Enum130]
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: MicrosoftGraphItemAnalytics, or the result of cls(response)
    :rtype: ~sites.models.MicrosoftGraphItemAnalytics
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphItemAnalytics"]
    # Map common failure statuses to typed exceptions; callers may extend
    # or override the mapping via the 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Construct URL
    url = self.get_analytics.metadata['url']  # type: ignore
    path_format_arguments = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters: OData $select/$expand are comma-joined lists.
    query_parameters = {}  # type: Dict[str, Any]
    if select is not None:
        query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
    if expand is not None:
        query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Only 200 is a success for this operation.
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('MicrosoftGraphItemAnalytics', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
get_analytics.metadata = {'url': '/sites/{site-id}/analytics'}  # type: ignore
def get_ref_analytics(
    self,
    site_id,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> str
    """Get ref of analytics from sites.

    Get ref of analytics from sites.

    :param site_id: key: id of site.
    :type site_id: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: str, or the result of cls(response)
    :rtype: str
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[str]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))

    # Resolve the templated operation URL for this site.
    request_url = self._client.format_url(
        self.get_ref_analytics.metadata['url'],  # type: ignore
        **{'site-id': self._serialize.url("site_id", site_id, 'str')}
    )

    # No query parameters for a $ref read; only the Accept header is set.
    headers = {'Accept': self._serialize.header("accept", "application/json", 'str')}  # type: Dict[str, Any]
    request = self._client.get(request_url, {}, headers)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('str', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get_ref_analytics.metadata = {'url': '/sites/{site-id}/analytics/$ref'}  # type: ignore
def set_ref_analytics(
    self,
    site_id,  # type: str
    body,  # type: Dict[str, object]
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Update the ref of navigation property analytics in sites.

    Update the ref of navigation property analytics in sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param body: New navigation property ref values.
    :type body: dict[str, object]
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Construct URL
    url = self.set_ref_analytics.metadata['url']  # type: ignore
    path_format_arguments = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters (none for a $ref update)
    query_parameters = {}  # type: Dict[str, Any]

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    # The ref payload is an open dict, serialized as a generic JSON object.
    body_content_kwargs = {}  # type: Dict[str, Any]
    body_content = self._serialize.body(body, '{object}')
    body_content_kwargs['content'] = body_content
    request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # PUT $ref returns 204 No Content on success.
    if response.status_code not in [204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
set_ref_analytics.metadata = {'url': '/sites/{site-id}/analytics/$ref'}  # type: ignore
def delete_ref_analytics(
    self,
    site_id,  # type: str
    if_match=None,  # type: Optional[str]
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Delete ref of navigation property analytics for sites.

    Delete ref of navigation property analytics for sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param if_match: ETag.
    :type if_match: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))

    # Resolve the templated operation URL for this site.
    request_url = self._client.format_url(
        self.delete_ref_analytics.metadata['url'],  # type: ignore
        **{'site-id': self._serialize.url("site_id", site_id, 'str')}
    )

    headers = {}  # type: Dict[str, Any]
    if if_match is not None:
        # Conditional delete: only remove the ref if the ETag still matches.
        headers['If-Match'] = self._serialize.header("if_match", if_match, 'str')
    headers['Accept'] = self._serialize.header("accept", "application/json", 'str')

    request = self._client.delete(request_url, {}, headers)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
delete_ref_analytics.metadata = {'url': '/sites/{site-id}/analytics/$ref'}  # type: ignore
def list_columns(
    self,
    site_id,  # type: str
    orderby=None,  # type: Optional[List[Union[str, "models.Enum131"]]]
    select=None,  # type: Optional[List[Union[str, "models.Enum132"]]]
    expand=None,  # type: Optional[List[str]]
    **kwargs  # type: Any
):
    # type: (...) -> Iterable["models.CollectionOfColumnDefinition"]
    """Get columns from sites.

    Get columns from sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param orderby: Order items by property values.
    :type orderby: list[str or ~sites.models.Enum131]
    :param select: Select properties to be returned.
    :type select: list[str or ~sites.models.Enum132]
    :param expand: Expand related entities.
    :type expand: list[str]
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either CollectionOfColumnDefinition or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~sites.models.CollectionOfColumnDefinition]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.CollectionOfColumnDefinition"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the request for one page: the first page carries the full
        # query string; follow-up pages use the server-supplied next_link,
        # which already embeds every query parameter.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # Construct URL
            url = self.list_columns.metadata['url']  # type: ignore
            path_format_arguments = {
                'site-id': self._serialize.url("site_id", site_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters: client-level paging options plus the
            # per-call OData options.
            query_parameters = {}  # type: Dict[str, Any]
            if self._config.top is not None:
                query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
            if self._config.skip is not None:
                query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
            if self._config.search is not None:
                query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
            if self._config.filter is not None:
                query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
            if self._config.count is not None:
                query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
            if orderby is not None:
                query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
            if select is not None:
                query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
            if expand is not None:
                query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Deserialize one page; return (next page link or None, items iterator).
        deserialized = self._deserialize('CollectionOfColumnDefinition', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.odata_next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        # Fetch a single page, raising a typed error on non-200 responses.
        request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize(models.OdataError, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        return pipeline_response

    # ItemPaged drives get_next/extract_data lazily as the caller iterates.
    return ItemPaged(
        get_next, extract_data
    )
list_columns.metadata = {'url': '/sites/{site-id}/columns'}  # type: ignore
def create_columns(
    self,
    site_id,  # type: str
    body,  # type: "models.MicrosoftGraphColumnDefinition"
    **kwargs  # type: Any
):
    # type: (...) -> "models.MicrosoftGraphColumnDefinition"
    """Create new navigation property to columns for sites.

    Create new navigation property to columns for sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param body: New navigation property.
    :type body: ~sites.models.MicrosoftGraphColumnDefinition
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: MicrosoftGraphColumnDefinition, or the result of cls(response)
    :rtype: ~sites.models.MicrosoftGraphColumnDefinition
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphColumnDefinition"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Construct URL
    url = self.create_columns.metadata['url']  # type: ignore
    path_format_arguments = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters (none for a create)
    query_parameters = {}  # type: Dict[str, Any]

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    body_content_kwargs = {}  # type: Dict[str, Any]
    body_content = self._serialize.body(body, 'MicrosoftGraphColumnDefinition')
    body_content_kwargs['content'] = body_content
    request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # POST returns 201 Created with the new entity in the body.
    if response.status_code not in [201]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('MicrosoftGraphColumnDefinition', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
create_columns.metadata = {'url': '/sites/{site-id}/columns'}  # type: ignore
def get_columns(
    self,
    site_id,  # type: str
    column_definition_id,  # type: str
    select=None,  # type: Optional[List[Union[str, "models.Enum133"]]]
    expand=None,  # type: Optional[List[str]]
    **kwargs  # type: Any
):
    # type: (...) -> "models.MicrosoftGraphColumnDefinition"
    """Get columns from sites.

    Get columns from sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param column_definition_id: key: id of columnDefinition.
    :type column_definition_id: str
    :param select: Select properties to be returned.
    :type select: list[str or ~sites.models.Enum133]
    :param expand: Expand related entities.
    :type expand: list[str]
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: MicrosoftGraphColumnDefinition, or the result of cls(response)
    :rtype: ~sites.models.MicrosoftGraphColumnDefinition
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphColumnDefinition"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Construct URL: both the site and the column definition key go into
    # the path.
    url = self.get_columns.metadata['url']  # type: ignore
    path_format_arguments = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
        'columnDefinition-id': self._serialize.url("column_definition_id", column_definition_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters: OData $select/$expand are comma-joined lists.
    query_parameters = {}  # type: Dict[str, Any]
    if select is not None:
        query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
    if expand is not None:
        query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('MicrosoftGraphColumnDefinition', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
get_columns.metadata = {'url': '/sites/{site-id}/columns/{columnDefinition-id}'}  # type: ignore
def update_columns(
    self,
    site_id,  # type: str
    column_definition_id,  # type: str
    body,  # type: "models.MicrosoftGraphColumnDefinition"
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Update the navigation property columns in sites.

    Update the navigation property columns in sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param column_definition_id: key: id of columnDefinition.
    :type column_definition_id: str
    :param body: New navigation property values.
    :type body: ~sites.models.MicrosoftGraphColumnDefinition
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Construct URL
    url = self.update_columns.metadata['url']  # type: ignore
    path_format_arguments = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
        'columnDefinition-id': self._serialize.url("column_definition_id", column_definition_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters (none for an update)
    query_parameters = {}  # type: Dict[str, Any]

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    body_content_kwargs = {}  # type: Dict[str, Any]
    body_content = self._serialize.body(body, 'MicrosoftGraphColumnDefinition')
    body_content_kwargs['content'] = body_content
    request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # PATCH returns 204 No Content on success.
    if response.status_code not in [204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
update_columns.metadata = {'url': '/sites/{site-id}/columns/{columnDefinition-id}'}  # type: ignore
def delete_columns(
    self,
    site_id,  # type: str
    column_definition_id,  # type: str
    if_match=None,  # type: Optional[str]
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Delete navigation property columns for sites.

    Delete navigation property columns for sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param column_definition_id: key: id of columnDefinition.
    :type column_definition_id: str
    :param if_match: ETag.
    :type if_match: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Construct URL
    url = self.delete_columns.metadata['url']  # type: ignore
    path_format_arguments = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
        'columnDefinition-id': self._serialize.url("column_definition_id", column_definition_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters (none for a delete)
    query_parameters = {}  # type: Dict[str, Any]

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    if if_match is not None:
        # Conditional delete: only proceed if the caller's ETag still matches.
        header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.delete(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # DELETE returns 204 No Content on success.
    if response.status_code not in [204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
delete_columns.metadata = {'url': '/sites/{site-id}/columns/{columnDefinition-id}'}  # type: ignore
def list_content_types(
    self,
    site_id,  # type: str
    orderby=None,  # type: Optional[List[Union[str, "models.Enum134"]]]
    select=None,  # type: Optional[List[Union[str, "models.Enum135"]]]
    expand=None,  # type: Optional[List[Union[str, "models.Enum136"]]]
    **kwargs  # type: Any
):
    # type: (...) -> Iterable["models.CollectionOfContentType"]
    """Get contentTypes from sites.

    Get contentTypes from sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param orderby: Order items by property values.
    :type orderby: list[str or ~sites.models.Enum134]
    :param select: Select properties to be returned.
    :type select: list[str or ~sites.models.Enum135]
    :param expand: Expand related entities.
    :type expand: list[str or ~sites.models.Enum136]
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either CollectionOfContentType or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~sites.models.CollectionOfContentType]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.CollectionOfContentType"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the request for one page: the first page carries the full
        # query string; follow-up pages use the server-supplied next_link,
        # which already embeds every query parameter.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # Construct URL
            url = self.list_content_types.metadata['url']  # type: ignore
            path_format_arguments = {
                'site-id': self._serialize.url("site_id", site_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters: client-level paging options plus the
            # per-call OData options.
            query_parameters = {}  # type: Dict[str, Any]
            if self._config.top is not None:
                query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
            if self._config.skip is not None:
                query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
            if self._config.search is not None:
                query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
            if self._config.filter is not None:
                query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
            if self._config.count is not None:
                query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
            if orderby is not None:
                query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
            if select is not None:
                query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
            if expand is not None:
                query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Deserialize one page; return (next page link or None, items iterator).
        deserialized = self._deserialize('CollectionOfContentType', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.odata_next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        # Fetch a single page, raising a typed error on non-200 responses.
        request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize(models.OdataError, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        return pipeline_response

    # ItemPaged drives get_next/extract_data lazily as the caller iterates.
    return ItemPaged(
        get_next, extract_data
    )
list_content_types.metadata = {'url': '/sites/{site-id}/contentTypes'}  # type: ignore
def create_content_types(
    self,
    site_id,  # type: str
    body,  # type: "models.MicrosoftGraphContentType"
    **kwargs  # type: Any
):
    # type: (...) -> "models.MicrosoftGraphContentType"
    """Create new navigation property to contentTypes for sites.

    Create new navigation property to contentTypes for sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param body: New navigation property.
    :type body: ~sites.models.MicrosoftGraphContentType
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: MicrosoftGraphContentType, or the result of cls(response)
    :rtype: ~sites.models.MicrosoftGraphContentType
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphContentType"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Expand the URL template with the percent-encoded site id.
    path_args = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
    }
    request_url = self._client.format_url(self.create_content_types.metadata['url'], **path_args)  # type: ignore

    # This operation takes no query options.
    query_params = {}  # type: Dict[str, Any]

    # JSON in, JSON out.
    headers = {}  # type: Dict[str, Any]
    headers['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    headers['Accept'] = self._serialize.header("accept", accept, 'str')

    # Serialize the payload and POST it through the pipeline.
    content_kwargs = {}  # type: Dict[str, Any]
    content_kwargs['content'] = self._serialize.body(body, 'MicrosoftGraphContentType')
    request = self._client.post(request_url, query_params, headers, **content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Anything but 201 Created is an error: map well-known codes, then
    # attach the deserialized OData error body to the raised exception.
    if response.status_code not in [201]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('MicrosoftGraphContentType', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
create_content_types.metadata = {'url': '/sites/{site-id}/contentTypes'}  # type: ignore
def get_content_types(
    self,
    site_id,  # type: str
    content_type_id,  # type: str
    select=None,  # type: Optional[List[Union[str, "models.Enum137"]]]
    expand=None,  # type: Optional[List[Union[str, "models.Enum138"]]]
    **kwargs  # type: Any
):
    # type: (...) -> "models.MicrosoftGraphContentType"
    """Get contentTypes from sites.

    Get contentTypes from sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param content_type_id: key: id of contentType.
    :type content_type_id: str
    :param select: Select properties to be returned.
    :type select: list[str or ~sites.models.Enum137]
    :param expand: Expand related entities.
    :type expand: list[str or ~sites.models.Enum138]
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: MicrosoftGraphContentType, or the result of cls(response)
    :rtype: ~sites.models.MicrosoftGraphContentType
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphContentType"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Fill in both path segments of the URL template.
    path_args = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
        'contentType-id': self._serialize.url("content_type_id", content_type_id, 'str'),
    }
    request_url = self._client.format_url(self.get_content_types.metadata['url'], **path_args)  # type: ignore

    # Optional OData projection / expansion options.
    query_params = {}  # type: Dict[str, Any]
    if select is not None:
        query_params['$select'] = self._serialize.query("select", select, '[str]', div=',')
    if expand is not None:
        query_params['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')

    headers = {}  # type: Dict[str, Any]
    headers['Accept'] = self._serialize.header("accept", accept, 'str')

    # Issue the GET; non-200 surfaces as a mapped or generic HTTP error.
    request = self._client.get(request_url, query_params, headers)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('MicrosoftGraphContentType', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get_content_types.metadata = {'url': '/sites/{site-id}/contentTypes/{contentType-id}'}  # type: ignore
def update_content_types(
    self,
    site_id,  # type: str
    content_type_id,  # type: str
    body,  # type: "models.MicrosoftGraphContentType"
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Update the navigation property contentTypes in sites.

    Update the navigation property contentTypes in sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param content_type_id: key: id of contentType.
    :type content_type_id: str
    :param body: New navigation property values.
    :type body: ~sites.models.MicrosoftGraphContentType
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Resolve the templated URL for this specific contentType.
    path_args = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
        'contentType-id': self._serialize.url("content_type_id", content_type_id, 'str'),
    }
    request_url = self._client.format_url(self.update_content_types.metadata['url'], **path_args)  # type: ignore

    query_params = {}  # type: Dict[str, Any]

    headers = {}  # type: Dict[str, Any]
    headers['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    headers['Accept'] = self._serialize.header("accept", accept, 'str')

    # PATCH the serialized payload; success is 204 No Content.
    content_kwargs = {}  # type: Dict[str, Any]
    content_kwargs['content'] = self._serialize.body(body, 'MicrosoftGraphContentType')
    request = self._client.patch(request_url, query_params, headers, **content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
update_content_types.metadata = {'url': '/sites/{site-id}/contentTypes/{contentType-id}'}  # type: ignore
def delete_content_types(
    self,
    site_id,  # type: str
    content_type_id,  # type: str
    if_match=None,  # type: Optional[str]
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Delete navigation property contentTypes for sites.

    Delete navigation property contentTypes for sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param content_type_id: key: id of contentType.
    :type content_type_id: str
    :param if_match: ETag.
    :type if_match: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Resolve the templated URL for the contentType being removed.
    path_args = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
        'contentType-id': self._serialize.url("content_type_id", content_type_id, 'str'),
    }
    request_url = self._client.format_url(self.delete_content_types.metadata['url'], **path_args)  # type: ignore

    query_params = {}  # type: Dict[str, Any]

    # Only send If-Match when the caller supplied an ETag.
    headers = {}  # type: Dict[str, Any]
    if if_match is not None:
        headers['If-Match'] = self._serialize.header("if_match", if_match, 'str')
    headers['Accept'] = self._serialize.header("accept", accept, 'str')

    # DELETE; success is 204 No Content.
    request = self._client.delete(request_url, query_params, headers)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
delete_content_types.metadata = {'url': '/sites/{site-id}/contentTypes/{contentType-id}'}  # type: ignore
def get_drive(
    self,
    site_id,  # type: str
    select=None,  # type: Optional[List[Union[str, "models.Enum142"]]]
    expand=None,  # type: Optional[List[Union[str, "models.Enum143"]]]
    **kwargs  # type: Any
):
    # type: (...) -> "models.MicrosoftGraphDrive"
    """Get drive from sites.

    Get drive from sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param select: Select properties to be returned.
    :type select: list[str or ~sites.models.Enum142]
    :param expand: Expand related entities.
    :type expand: list[str or ~sites.models.Enum143]
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: MicrosoftGraphDrive, or the result of cls(response)
    :rtype: ~sites.models.MicrosoftGraphDrive
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphDrive"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Expand the URL template with the encoded site id.
    path_args = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
    }
    request_url = self._client.format_url(self.get_drive.metadata['url'], **path_args)  # type: ignore

    # Optional OData projection / expansion options.
    query_params = {}  # type: Dict[str, Any]
    if select is not None:
        query_params['$select'] = self._serialize.query("select", select, '[str]', div=',')
    if expand is not None:
        query_params['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')

    headers = {}  # type: Dict[str, Any]
    headers['Accept'] = self._serialize.header("accept", accept, 'str')

    # GET the drive; anything but 200 raises.
    request = self._client.get(request_url, query_params, headers)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('MicrosoftGraphDrive', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get_drive.metadata = {'url': '/sites/{site-id}/drive'}  # type: ignore
def update_drive(
    self,
    site_id,  # type: str
    body,  # type: "models.MicrosoftGraphDrive"
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Update the navigation property drive in sites.

    Update the navigation property drive in sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param body: New navigation property values.
    :type body: ~sites.models.MicrosoftGraphDrive
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Resolve the templated URL.
    path_args = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
    }
    request_url = self._client.format_url(self.update_drive.metadata['url'], **path_args)  # type: ignore

    query_params = {}  # type: Dict[str, Any]

    headers = {}  # type: Dict[str, Any]
    headers['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    headers['Accept'] = self._serialize.header("accept", accept, 'str')

    # PATCH the serialized payload; success is 204 No Content.
    content_kwargs = {}  # type: Dict[str, Any]
    content_kwargs['content'] = self._serialize.body(body, 'MicrosoftGraphDrive')
    request = self._client.patch(request_url, query_params, headers, **content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
update_drive.metadata = {'url': '/sites/{site-id}/drive'}  # type: ignore
def delete_drive(
    self,
    site_id,  # type: str
    if_match=None,  # type: Optional[str]
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Delete navigation property drive for sites.

    Delete navigation property drive for sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param if_match: ETag.
    :type if_match: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Resolve the templated URL.
    path_args = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
    }
    request_url = self._client.format_url(self.delete_drive.metadata['url'], **path_args)  # type: ignore

    query_params = {}  # type: Dict[str, Any]

    # Only send If-Match when the caller supplied an ETag.
    headers = {}  # type: Dict[str, Any]
    if if_match is not None:
        headers['If-Match'] = self._serialize.header("if_match", if_match, 'str')
    headers['Accept'] = self._serialize.header("accept", accept, 'str')

    # DELETE; success is 204 No Content.
    request = self._client.delete(request_url, query_params, headers)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
delete_drive.metadata = {'url': '/sites/{site-id}/drive'}  # type: ignore
def list_drives(
    self,
    site_id,  # type: str
    orderby=None,  # type: Optional[List[Union[str, "models.Enum144"]]]
    select=None,  # type: Optional[List[Union[str, "models.Enum145"]]]
    expand=None,  # type: Optional[List[Union[str, "models.Enum146"]]]
    **kwargs  # type: Any
):
    # type: (...) -> Iterable["models.CollectionOfDrive"]
    """Get drives from sites.

    Get drives from sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param orderby: Order items by property values.
    :type orderby: list[str or ~sites.models.Enum144]
    :param select: Select properties to be returned.
    :type select: list[str or ~sites.models.Enum145]
    :param expand: Expand related entities.
    :type expand: list[str or ~sites.models.Enum146]
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either CollectionOfDrive or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~sites.models.CollectionOfDrive]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.CollectionOfDrive"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the GET for one page: the first page uses the templated URL
        # plus the OData query options; later pages use the service-supplied
        # next_link verbatim (it already carries its own query string).
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # Construct URL
            url = self.list_drives.metadata['url']  # type: ignore
            path_format_arguments = {
                'site-id': self._serialize.url("site_id", site_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            if self._config.top is not None:
                query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
            if self._config.skip is not None:
                query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
            if self._config.search is not None:
                query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
            if self._config.filter is not None:
                query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
            if self._config.count is not None:
                query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
            if orderby is not None:
                query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
            if select is not None:
                query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
            if expand is not None:
                query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Deserialize one page; return (next link or None, iterator of items).
        deserialized = self._deserialize('CollectionOfDrive', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.odata_next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            # Map well-known status codes first (consistent with the
            # non-paging operations in this class); the OData error body is
            # only deserialized when it will be attached to the exception.
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        return pipeline_response

    return ItemPaged(
        get_next, extract_data
    )
list_drives.metadata = {'url': '/sites/{site-id}/drives'}  # type: ignore
def create_drives(
    self,
    site_id,  # type: str
    body,  # type: "models.MicrosoftGraphDrive"
    **kwargs  # type: Any
):
    # type: (...) -> "models.MicrosoftGraphDrive"
    """Create new navigation property to drives for sites.

    Create new navigation property to drives for sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param body: New navigation property.
    :type body: ~sites.models.MicrosoftGraphDrive
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: MicrosoftGraphDrive, or the result of cls(response)
    :rtype: ~sites.models.MicrosoftGraphDrive
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphDrive"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Expand the URL template with the encoded site id.
    path_args = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
    }
    request_url = self._client.format_url(self.create_drives.metadata['url'], **path_args)  # type: ignore

    query_params = {}  # type: Dict[str, Any]

    headers = {}  # type: Dict[str, Any]
    headers['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    headers['Accept'] = self._serialize.header("accept", accept, 'str')

    # POST the serialized payload; success is 201 Created.
    content_kwargs = {}  # type: Dict[str, Any]
    content_kwargs['content'] = self._serialize.body(body, 'MicrosoftGraphDrive')
    request = self._client.post(request_url, query_params, headers, **content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [201]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('MicrosoftGraphDrive', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
create_drives.metadata = {'url': '/sites/{site-id}/drives'}  # type: ignore
def get_drives(
    self,
    site_id,  # type: str
    drive_id,  # type: str
    select=None,  # type: Optional[List[Union[str, "models.Enum147"]]]
    expand=None,  # type: Optional[List[Union[str, "models.Enum148"]]]
    **kwargs  # type: Any
):
    # type: (...) -> "models.MicrosoftGraphDrive"
    """Get drives from sites.

    Get drives from sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param drive_id: key: id of drive.
    :type drive_id: str
    :param select: Select properties to be returned.
    :type select: list[str or ~sites.models.Enum147]
    :param expand: Expand related entities.
    :type expand: list[str or ~sites.models.Enum148]
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: MicrosoftGraphDrive, or the result of cls(response)
    :rtype: ~sites.models.MicrosoftGraphDrive
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphDrive"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Fill in both path segments of the URL template.
    path_args = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
        'drive-id': self._serialize.url("drive_id", drive_id, 'str'),
    }
    request_url = self._client.format_url(self.get_drives.metadata['url'], **path_args)  # type: ignore

    # Optional OData projection / expansion options.
    query_params = {}  # type: Dict[str, Any]
    if select is not None:
        query_params['$select'] = self._serialize.query("select", select, '[str]', div=',')
    if expand is not None:
        query_params['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')

    headers = {}  # type: Dict[str, Any]
    headers['Accept'] = self._serialize.header("accept", accept, 'str')

    # GET the drive; anything but 200 raises.
    request = self._client.get(request_url, query_params, headers)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('MicrosoftGraphDrive', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get_drives.metadata = {'url': '/sites/{site-id}/drives/{drive-id}'}  # type: ignore
def update_drives(
    self,
    site_id,  # type: str
    drive_id,  # type: str
    body,  # type: "models.MicrosoftGraphDrive"
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Update the navigation property drives in sites.

    Update the navigation property drives in sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param drive_id: key: id of drive.
    :type drive_id: str
    :param body: New navigation property values.
    :type body: ~sites.models.MicrosoftGraphDrive
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Resolve the templated URL for this specific drive.
    path_args = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
        'drive-id': self._serialize.url("drive_id", drive_id, 'str'),
    }
    request_url = self._client.format_url(self.update_drives.metadata['url'], **path_args)  # type: ignore

    query_params = {}  # type: Dict[str, Any]

    headers = {}  # type: Dict[str, Any]
    headers['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    headers['Accept'] = self._serialize.header("accept", accept, 'str')

    # PATCH the serialized payload; success is 204 No Content.
    content_kwargs = {}  # type: Dict[str, Any]
    content_kwargs['content'] = self._serialize.body(body, 'MicrosoftGraphDrive')
    request = self._client.patch(request_url, query_params, headers, **content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
update_drives.metadata = {'url': '/sites/{site-id}/drives/{drive-id}'}  # type: ignore
def delete_drives(
    self,
    site_id,  # type: str
    drive_id,  # type: str
    if_match=None,  # type: Optional[str]
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Delete navigation property drives for sites.

    Delete navigation property drives for sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param drive_id: key: id of drive.
    :type drive_id: str
    :param if_match: ETag.
    :type if_match: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Resolve the templated URL for the drive being removed.
    path_args = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
        'drive-id': self._serialize.url("drive_id", drive_id, 'str'),
    }
    request_url = self._client.format_url(self.delete_drives.metadata['url'], **path_args)  # type: ignore

    query_params = {}  # type: Dict[str, Any]

    # Only send If-Match when the caller supplied an ETag.
    headers = {}  # type: Dict[str, Any]
    if if_match is not None:
        headers['If-Match'] = self._serialize.header("if_match", if_match, 'str')
    headers['Accept'] = self._serialize.header("accept", accept, 'str')

    # DELETE; success is 204 No Content.
    request = self._client.delete(request_url, query_params, headers)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
delete_drives.metadata = {'url': '/sites/{site-id}/drives/{drive-id}'}  # type: ignore
def list_lists(
    self,
    site_id,  # type: str
    orderby=None,  # type: Optional[List[Union[str, "models.Enum149"]]]
    select=None,  # type: Optional[List[Union[str, "models.Enum150"]]]
    expand=None,  # type: Optional[List[Union[str, "models.Enum151"]]]
    **kwargs  # type: Any
):
    # type: (...) -> Iterable["models.CollectionOfList"]
    """Get lists from sites.

    Get lists from sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param orderby: Order items by property values.
    :type orderby: list[str or ~sites.models.Enum149]
    :param select: Select properties to be returned.
    :type select: list[str or ~sites.models.Enum150]
    :param expand: Expand related entities.
    :type expand: list[str or ~sites.models.Enum151]
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either CollectionOfList or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~sites.models.CollectionOfList]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.CollectionOfList"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the GET for one page: the first page uses the templated URL
        # plus the OData query options; later pages use the service-supplied
        # next_link verbatim (it already carries its own query string).
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # Construct URL
            url = self.list_lists.metadata['url']  # type: ignore
            path_format_arguments = {
                'site-id': self._serialize.url("site_id", site_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            if self._config.top is not None:
                query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
            if self._config.skip is not None:
                query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
            if self._config.search is not None:
                query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
            if self._config.filter is not None:
                query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
            if self._config.count is not None:
                query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
            if orderby is not None:
                query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
            if select is not None:
                query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
            if expand is not None:
                query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Deserialize one page; return (next link or None, iterator of items).
        deserialized = self._deserialize('CollectionOfList', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.odata_next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            # Map well-known status codes first (consistent with the
            # non-paging operations in this class); the OData error body is
            # only deserialized when it will be attached to the exception.
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        return pipeline_response

    return ItemPaged(
        get_next, extract_data
    )
list_lists.metadata = {'url': '/sites/{site-id}/lists'}  # type: ignore
def create_lists(
    self,
    site_id,  # type: str
    body,  # type: "models.MicrosoftGraphList"
    **kwargs  # type: Any
):
    # type: (...) -> "models.MicrosoftGraphList"
    """Create new navigation property to lists for sites.

    POSTs the serialized list to ``/sites/{site-id}/lists`` and returns the
    entity created by the service.

    :param site_id: key: id of site.
    :type site_id: str
    :param body: New navigation property.
    :type body: ~sites.models.MicrosoftGraphList
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: MicrosoftGraphList, or the result of cls(response)
    :rtype: ~sites.models.MicrosoftGraphList
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphList"]
    # Status codes mapped to typed exceptions; callers may extend via 'error_map'.
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Resolve the templated URL from the operation metadata.
    path_args = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
    }
    request_url = self._client.format_url(self.create_lists.metadata['url'], **path_args)  # type: ignore

    query_params = {}  # type: Dict[str, Any]
    headers = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    serialized_body = self._serialize.body(body, 'MicrosoftGraphList')
    request = self._client.post(request_url, query_params, headers, content=serialized_body)
    pipeline_resp = self._client._pipeline.run(request, stream=False, **kwargs)
    http_resp = pipeline_resp.http_response

    # The service replies 201 Created on success; anything else is an error.
    if http_resp.status_code not in [201]:
        map_error(status_code=http_resp.status_code, response=http_resp, error_map=error_map)
        error = self._deserialize(models.OdataError, http_resp)
        raise HttpResponseError(response=http_resp, model=error, error_format=ARMErrorFormat)

    result = self._deserialize('MicrosoftGraphList', pipeline_resp)
    if cls:
        return cls(pipeline_resp, result, {})
    return result
create_lists.metadata = {'url': '/sites/{site-id}/lists'}  # type: ignore
def get_lists(
    self,
    site_id,  # type: str
    list_id,  # type: str
    select=None,  # type: Optional[List[Union[str, "models.Enum152"]]]
    expand=None,  # type: Optional[List[Union[str, "models.Enum153"]]]
    **kwargs  # type: Any
):
    # type: (...) -> "models.MicrosoftGraphList"
    """Get lists from sites.

    Fetches a single list entity from ``/sites/{site-id}/lists/{list-id}``,
    optionally narrowing the payload with ``$select``/``$expand``.

    :param site_id: key: id of site.
    :type site_id: str
    :param list_id: key: id of list.
    :type list_id: str
    :param select: Select properties to be returned.
    :type select: list[str or ~sites.models.Enum152]
    :param expand: Expand related entities.
    :type expand: list[str or ~sites.models.Enum153]
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: MicrosoftGraphList, or the result of cls(response)
    :rtype: ~sites.models.MicrosoftGraphList
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphList"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    path_args = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
        'list-id': self._serialize.url("list_id", list_id, 'str'),
    }
    request_url = self._client.format_url(self.get_lists.metadata['url'], **path_args)  # type: ignore

    # Only emit OData query options the caller actually supplied.
    query_params = {}  # type: Dict[str, Any]
    if select is not None:
        query_params['$select'] = self._serialize.query("select", select, '[str]', div=',')
    if expand is not None:
        query_params['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
    headers = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.get(request_url, query_params, headers)
    pipeline_resp = self._client._pipeline.run(request, stream=False, **kwargs)
    http_resp = pipeline_resp.http_response

    if http_resp.status_code not in [200]:
        map_error(status_code=http_resp.status_code, response=http_resp, error_map=error_map)
        error = self._deserialize(models.OdataError, http_resp)
        raise HttpResponseError(response=http_resp, model=error, error_format=ARMErrorFormat)

    result = self._deserialize('MicrosoftGraphList', pipeline_resp)
    if cls:
        return cls(pipeline_resp, result, {})
    return result
get_lists.metadata = {'url': '/sites/{site-id}/lists/{list-id}'}  # type: ignore
def update_lists(
    self,
    site_id,  # type: str
    list_id,  # type: str
    body,  # type: "models.MicrosoftGraphList"
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Update the navigation property lists in sites.

    PATCHes the serialized list to ``/sites/{site-id}/lists/{list-id}``.
    Success is 204 No Content, so nothing is returned.

    :param site_id: key: id of site.
    :type site_id: str
    :param list_id: key: id of list.
    :type list_id: str
    :param body: New navigation property values.
    :type body: ~sites.models.MicrosoftGraphList
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    path_args = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
        'list-id': self._serialize.url("list_id", list_id, 'str'),
    }
    request_url = self._client.format_url(self.update_lists.metadata['url'], **path_args)  # type: ignore

    query_params = {}  # type: Dict[str, Any]
    headers = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    serialized_body = self._serialize.body(body, 'MicrosoftGraphList')
    request = self._client.patch(request_url, query_params, headers, content=serialized_body)
    pipeline_resp = self._client._pipeline.run(request, stream=False, **kwargs)
    http_resp = pipeline_resp.http_response

    if http_resp.status_code not in [204]:
        map_error(status_code=http_resp.status_code, response=http_resp, error_map=error_map)
        error = self._deserialize(models.OdataError, http_resp)
        raise HttpResponseError(response=http_resp, model=error, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_resp, None, {})
update_lists.metadata = {'url': '/sites/{site-id}/lists/{list-id}'}  # type: ignore
def delete_lists(
    self,
    site_id,  # type: str
    list_id,  # type: str
    if_match=None,  # type: Optional[str]
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Delete navigation property lists for sites.

    Issues a DELETE against ``/sites/{site-id}/lists/{list-id}``; pass
    ``if_match`` to make the delete conditional on the entity's ETag.

    :param site_id: key: id of site.
    :type site_id: str
    :param list_id: key: id of list.
    :type list_id: str
    :param if_match: ETag.
    :type if_match: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    path_args = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
        'list-id': self._serialize.url("list_id", list_id, 'str'),
    }
    request_url = self._client.format_url(self.delete_lists.metadata['url'], **path_args)  # type: ignore

    query_params = {}  # type: Dict[str, Any]
    headers = {}  # type: Dict[str, Any]
    # If-Match is optional; only send it when the caller provided an ETag.
    if if_match is not None:
        headers['If-Match'] = self._serialize.header("if_match", if_match, 'str')
    headers['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.delete(request_url, query_params, headers)
    pipeline_resp = self._client._pipeline.run(request, stream=False, **kwargs)
    http_resp = pipeline_resp.http_response

    if http_resp.status_code not in [204]:
        map_error(status_code=http_resp.status_code, response=http_resp, error_map=error_map)
        error = self._deserialize(models.OdataError, http_resp)
        raise HttpResponseError(response=http_resp, model=error, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_resp, None, {})
delete_lists.metadata = {'url': '/sites/{site-id}/lists/{list-id}'}  # type: ignore
def get_activities_by_interval(
    self,
    site_id,  # type: str
    start_date_time,  # type: str
    end_date_time,  # type: str
    interval,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> List["models.MicrosoftGraphItemActivityStat"]
    """Invoke function getActivitiesByInterval.

    Calls the Graph function
    ``getActivitiesByInterval(startDateTime,endDateTime,interval)`` for a site;
    all three function arguments are embedded in the URL path.

    :param site_id: key: id of site.
    :type site_id: str
    :param start_date_time:
    :type start_date_time: str
    :param end_date_time:
    :type end_date_time: str
    :param interval:
    :type interval: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: list of MicrosoftGraphItemActivityStat, or the result of cls(response)
    :rtype: list[~sites.models.MicrosoftGraphItemActivityStat]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[List["models.MicrosoftGraphItemActivityStat"]]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    path_args = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
        'startDateTime': self._serialize.url("start_date_time", start_date_time, 'str'),
        'endDateTime': self._serialize.url("end_date_time", end_date_time, 'str'),
        'interval': self._serialize.url("interval", interval, 'str'),
    }
    request_url = self._client.format_url(self.get_activities_by_interval.metadata['url'], **path_args)  # type: ignore

    query_params = {}  # type: Dict[str, Any]
    headers = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.get(request_url, query_params, headers)
    pipeline_resp = self._client._pipeline.run(request, stream=False, **kwargs)
    http_resp = pipeline_resp.http_response

    if http_resp.status_code not in [200]:
        map_error(status_code=http_resp.status_code, response=http_resp, error_map=error_map)
        error = self._deserialize(models.OdataError, http_resp)
        raise HttpResponseError(response=http_resp, model=error, error_format=ARMErrorFormat)

    result = self._deserialize('[MicrosoftGraphItemActivityStat]', pipeline_resp)
    if cls:
        return cls(pipeline_resp, result, {})
    return result
get_activities_by_interval.metadata = {'url': '/sites/{site-id}/microsoft.graph.getActivitiesByInterval(startDateTime=\'{startDateTime}\',endDateTime=\'{endDateTime}\',interval=\'{interval}\')'}  # type: ignore
def get_by_path(
    self,
    site_id,  # type: str
    path,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> "models.MicrosoftGraphSite"
    """Invoke function getByPath.

    Calls the Graph function ``getByPath(path)`` for a site; the ``path``
    argument is embedded in the URL.

    :param site_id: key: id of site.
    :type site_id: str
    :param path:
    :type path: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: MicrosoftGraphSite, or the result of cls(response)
    :rtype: ~sites.models.MicrosoftGraphSite
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphSite"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    path_args = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
        'path': self._serialize.url("path", path, 'str'),
    }
    request_url = self._client.format_url(self.get_by_path.metadata['url'], **path_args)  # type: ignore

    query_params = {}  # type: Dict[str, Any]
    headers = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.get(request_url, query_params, headers)
    pipeline_resp = self._client._pipeline.run(request, stream=False, **kwargs)
    http_resp = pipeline_resp.http_response

    if http_resp.status_code not in [200]:
        map_error(status_code=http_resp.status_code, response=http_resp, error_map=error_map)
        error = self._deserialize(models.OdataError, http_resp)
        raise HttpResponseError(response=http_resp, model=error, error_format=ARMErrorFormat)

    result = self._deserialize('MicrosoftGraphSite', pipeline_resp)
    if cls:
        return cls(pipeline_resp, result, {})
    return result
get_by_path.metadata = {'url': '/sites/{site-id}/microsoft.graph.getByPath(path=\'{path}\')'}  # type: ignore
def list_pages(
    self,
    site_id,  # type: str
    orderby=None,  # type: Optional[List[Union[str, "models.Enum219"]]]
    select=None,  # type: Optional[List[Union[str, "models.Enum220"]]]
    expand=None,  # type: Optional[List[Union[str, "models.Enum221"]]]
    **kwargs  # type: Any
):
    # type: (...) -> Iterable["models.CollectionOfSitePage"]
    """Get pages from sites.

    Returns a lazily-evaluated pager over ``/sites/{site-id}/pages``. The
    first request carries the OData query options; follow-up requests use the
    server-supplied ``@odata.nextLink`` verbatim.

    :param site_id: key: id of site.
    :type site_id: str
    :param orderby: Order items by property values.
    :type orderby: list[str or ~sites.models.Enum219]
    :param select: Select properties to be returned.
    :type select: list[str or ~sites.models.Enum220]
    :param expand: Expand related entities.
    :type expand: list[str or ~sites.models.Enum221]
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either CollectionOfSitePage or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~sites.models.CollectionOfSitePage]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.CollectionOfSitePage"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the first-page request (templated URL + query options) or a
        # follow-up request from next_link, which already embeds all options.
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            url = self.list_pages.metadata['url']  # type: ignore
            path_format_arguments = {
                'site-id': self._serialize.url("site_id", site_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            query_parameters = {}  # type: Dict[str, Any]
            # Client-level paging/filter defaults come from the configuration.
            if self._config.top is not None:
                query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
            if self._config.skip is not None:
                query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
            if self._config.search is not None:
                query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
            if self._config.filter is not None:
                query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
            if self._config.count is not None:
                query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
            if orderby is not None:
                query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
            if select is not None:
                query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
            if expand is not None:
                query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Yields (continuation token, page items) as required by ItemPaged.
        deserialized = self._deserialize('CollectionOfSitePage', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.odata_next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            # Raise mapped typed errors first; only deserialize the OData
            # error body for the generic HttpResponseError fallback. This
            # matches the ordering of the non-paging operations in this file
            # and avoids a wasted deserialization when map_error raises.
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        return pipeline_response

    return ItemPaged(
        get_next, extract_data
    )
list_pages.metadata = {'url': '/sites/{site-id}/pages'}  # type: ignore
def create_pages(
    self,
    site_id,  # type: str
    body,  # type: "models.MicrosoftGraphSitePage"
    **kwargs  # type: Any
):
    # type: (...) -> "models.MicrosoftGraphSitePage"
    """Create new navigation property to pages for sites.

    POSTs the serialized page to ``/sites/{site-id}/pages`` and returns the
    entity created by the service.

    :param site_id: key: id of site.
    :type site_id: str
    :param body: New navigation property.
    :type body: ~sites.models.MicrosoftGraphSitePage
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: MicrosoftGraphSitePage, or the result of cls(response)
    :rtype: ~sites.models.MicrosoftGraphSitePage
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphSitePage"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    path_args = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
    }
    request_url = self._client.format_url(self.create_pages.metadata['url'], **path_args)  # type: ignore

    query_params = {}  # type: Dict[str, Any]
    headers = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    serialized_body = self._serialize.body(body, 'MicrosoftGraphSitePage')
    request = self._client.post(request_url, query_params, headers, content=serialized_body)
    pipeline_resp = self._client._pipeline.run(request, stream=False, **kwargs)
    http_resp = pipeline_resp.http_response

    # 201 Created is the only success status for this operation.
    if http_resp.status_code not in [201]:
        map_error(status_code=http_resp.status_code, response=http_resp, error_map=error_map)
        error = self._deserialize(models.OdataError, http_resp)
        raise HttpResponseError(response=http_resp, model=error, error_format=ARMErrorFormat)

    result = self._deserialize('MicrosoftGraphSitePage', pipeline_resp)
    if cls:
        return cls(pipeline_resp, result, {})
    return result
create_pages.metadata = {'url': '/sites/{site-id}/pages'}  # type: ignore
def get_pages(
    self,
    site_id,  # type: str
    site_page_id,  # type: str
    select=None,  # type: Optional[List[Union[str, "models.Enum222"]]]
    expand=None,  # type: Optional[List[Union[str, "models.Enum223"]]]
    **kwargs  # type: Any
):
    # type: (...) -> "models.MicrosoftGraphSitePage"
    """Get pages from sites.

    Fetches a single page entity from ``/sites/{site-id}/pages/{sitePage-id}``,
    optionally narrowing the payload with ``$select``/``$expand``.

    :param site_id: key: id of site.
    :type site_id: str
    :param site_page_id: key: id of sitePage.
    :type site_page_id: str
    :param select: Select properties to be returned.
    :type select: list[str or ~sites.models.Enum222]
    :param expand: Expand related entities.
    :type expand: list[str or ~sites.models.Enum223]
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: MicrosoftGraphSitePage, or the result of cls(response)
    :rtype: ~sites.models.MicrosoftGraphSitePage
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphSitePage"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    path_args = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
        'sitePage-id': self._serialize.url("site_page_id", site_page_id, 'str'),
    }
    request_url = self._client.format_url(self.get_pages.metadata['url'], **path_args)  # type: ignore

    # Only emit OData query options the caller actually supplied.
    query_params = {}  # type: Dict[str, Any]
    if select is not None:
        query_params['$select'] = self._serialize.query("select", select, '[str]', div=',')
    if expand is not None:
        query_params['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
    headers = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.get(request_url, query_params, headers)
    pipeline_resp = self._client._pipeline.run(request, stream=False, **kwargs)
    http_resp = pipeline_resp.http_response

    if http_resp.status_code not in [200]:
        map_error(status_code=http_resp.status_code, response=http_resp, error_map=error_map)
        error = self._deserialize(models.OdataError, http_resp)
        raise HttpResponseError(response=http_resp, model=error, error_format=ARMErrorFormat)

    result = self._deserialize('MicrosoftGraphSitePage', pipeline_resp)
    if cls:
        return cls(pipeline_resp, result, {})
    return result
get_pages.metadata = {'url': '/sites/{site-id}/pages/{sitePage-id}'}  # type: ignore
def update_pages(
    self,
    site_id,  # type: str
    site_page_id,  # type: str
    body,  # type: "models.MicrosoftGraphSitePage"
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Update the navigation property pages in sites.

    PATCHes the serialized page to ``/sites/{site-id}/pages/{sitePage-id}``.
    Success is 204 No Content, so nothing is returned.

    :param site_id: key: id of site.
    :type site_id: str
    :param site_page_id: key: id of sitePage.
    :type site_page_id: str
    :param body: New navigation property values.
    :type body: ~sites.models.MicrosoftGraphSitePage
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    path_args = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
        'sitePage-id': self._serialize.url("site_page_id", site_page_id, 'str'),
    }
    request_url = self._client.format_url(self.update_pages.metadata['url'], **path_args)  # type: ignore

    query_params = {}  # type: Dict[str, Any]
    headers = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    serialized_body = self._serialize.body(body, 'MicrosoftGraphSitePage')
    request = self._client.patch(request_url, query_params, headers, content=serialized_body)
    pipeline_resp = self._client._pipeline.run(request, stream=False, **kwargs)
    http_resp = pipeline_resp.http_response

    if http_resp.status_code not in [204]:
        map_error(status_code=http_resp.status_code, response=http_resp, error_map=error_map)
        error = self._deserialize(models.OdataError, http_resp)
        raise HttpResponseError(response=http_resp, model=error, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_resp, None, {})
update_pages.metadata = {'url': '/sites/{site-id}/pages/{sitePage-id}'}  # type: ignore
def delete_pages(
    self,
    site_id,  # type: str
    site_page_id,  # type: str
    if_match=None,  # type: Optional[str]
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Delete navigation property pages for sites.

    Issues a DELETE against ``/sites/{site-id}/pages/{sitePage-id}``; pass
    ``if_match`` to make the delete conditional on the entity's ETag.

    :param site_id: key: id of site.
    :type site_id: str
    :param site_page_id: key: id of sitePage.
    :type site_page_id: str
    :param if_match: ETag.
    :type if_match: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    path_args = {
        'site-id': self._serialize.url("site_id", site_id, 'str'),
        'sitePage-id': self._serialize.url("site_page_id", site_page_id, 'str'),
    }
    request_url = self._client.format_url(self.delete_pages.metadata['url'], **path_args)  # type: ignore

    query_params = {}  # type: Dict[str, Any]
    headers = {}  # type: Dict[str, Any]
    # If-Match is optional; only send it when the caller provided an ETag.
    if if_match is not None:
        headers['If-Match'] = self._serialize.header("if_match", if_match, 'str')
    headers['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.delete(request_url, query_params, headers)
    pipeline_resp = self._client._pipeline.run(request, stream=False, **kwargs)
    http_resp = pipeline_resp.http_response

    if http_resp.status_code not in [204]:
        map_error(status_code=http_resp.status_code, response=http_resp, error_map=error_map)
        error = self._deserialize(models.OdataError, http_resp)
        raise HttpResponseError(response=http_resp, model=error, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_resp, None, {})
delete_pages.metadata = {'url': '/sites/{site-id}/pages/{sitePage-id}'}  # type: ignore
def list_sites(
    self,
    site_id,  # type: str
    orderby=None,  # type: Optional[List[Union[str, "models.Enum224"]]]
    select=None,  # type: Optional[List[Union[str, "models.Enum225"]]]
    expand=None,  # type: Optional[List[Union[str, "models.Enum226"]]]
    **kwargs  # type: Any
):
    # type: (...) -> Iterable["models.CollectionOfSite1"]
    """Get sites from sites.

    Returns a lazily-evaluated pager over ``/sites/{site-id}/sites``. The
    first request carries the OData query options; follow-up requests use the
    server-supplied ``@odata.nextLink`` verbatim.

    :param site_id: key: id of site.
    :type site_id: str
    :param orderby: Order items by property values.
    :type orderby: list[str or ~sites.models.Enum224]
    :param select: Select properties to be returned.
    :type select: list[str or ~sites.models.Enum225]
    :param expand: Expand related entities.
    :type expand: list[str or ~sites.models.Enum226]
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either CollectionOfSite1 or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~sites.models.CollectionOfSite1]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.CollectionOfSite1"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the first-page request (templated URL + query options) or a
        # follow-up request from next_link, which already embeds all options.
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            url = self.list_sites.metadata['url']  # type: ignore
            path_format_arguments = {
                'site-id': self._serialize.url("site_id", site_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            query_parameters = {}  # type: Dict[str, Any]
            # Client-level paging/filter defaults come from the configuration.
            if self._config.top is not None:
                query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
            if self._config.skip is not None:
                query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
            if self._config.search is not None:
                query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
            if self._config.filter is not None:
                query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
            if self._config.count is not None:
                query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
            if orderby is not None:
                query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
            if select is not None:
                query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
            if expand is not None:
                query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Yields (continuation token, page items) as required by ItemPaged.
        deserialized = self._deserialize('CollectionOfSite1', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.odata_next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            # Raise mapped typed errors first; only deserialize the OData
            # error body for the generic HttpResponseError fallback. This
            # matches the ordering of the non-paging operations in this file
            # and avoids a wasted deserialization when map_error raises.
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        return pipeline_response

    return ItemPaged(
        get_next, extract_data
    )
list_sites.metadata = {'url': '/sites/{site-id}/sites'}  # type: ignore
def create_sites(
    self,
    site_id,  # type: str
    body,  # type: "models.MicrosoftGraphSite"
    **kwargs  # type: Any
):
    # type: (...) -> "models.MicrosoftGraphSite"
    """Create new navigation property to sites for sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param body: New navigation property.
    :type body: ~sites.models.MicrosoftGraphSite
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: MicrosoftGraphSite, or the result of cls(response)
    :rtype: ~sites.models.MicrosoftGraphSite
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphSite"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Expand the metadata URL template with the escaped path key.
    url = self._client.format_url(
        self.create_sites.metadata['url'],  # type: ignore
        **{'site-id': self._serialize.url("site_id", site_id, 'str')}
    )

    query_parameters = {}  # type: Dict[str, Any]

    # JSON request body and JSON response expected.
    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    body_content_kwargs = {
        'content': self._serialize.body(body, 'MicrosoftGraphSite'),
    }  # type: Dict[str, Any]
    request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Anything other than 201 Created is surfaced as an error.
    if response.status_code not in [201]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('MicrosoftGraphSite', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
create_sites.metadata = {'url': '/sites/{site-id}/sites'}  # type: ignore
def get_sites(
    self,
    site_id,  # type: str
    site_id1,  # type: str
    select=None,  # type: Optional[List[Union[str, "models.Enum227"]]]
    expand=None,  # type: Optional[List[Union[str, "models.Enum228"]]]
    **kwargs  # type: Any
):
    # type: (...) -> "models.MicrosoftGraphSite"
    """Get sites from sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param site_id1: key: id of site.
    :type site_id1: str
    :param select: Select properties to be returned.
    :type select: list[str or ~sites.models.Enum227]
    :param expand: Expand related entities.
    :type expand: list[str or ~sites.models.Enum228]
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: MicrosoftGraphSite, or the result of cls(response)
    :rtype: ~sites.models.MicrosoftGraphSite
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphSite"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Expand the metadata URL template with both escaped path keys.
    url = self._client.format_url(
        self.get_sites.metadata['url'],  # type: ignore
        **{
            'site-id': self._serialize.url("site_id", site_id, 'str'),
            'site-id1': self._serialize.url("site_id1", site_id1, 'str'),
        }
    )

    # Only send the OData options the caller supplied.
    query_parameters = {}  # type: Dict[str, Any]
    if select is not None:
        query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
    if expand is not None:
        query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')

    header_parameters = {
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('MicrosoftGraphSite', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get_sites.metadata = {'url': '/sites/{site-id}/sites/{site-id1}'}  # type: ignore
def update_sites(
    self,
    site_id,  # type: str
    site_id1,  # type: str
    body,  # type: "models.MicrosoftGraphSite"
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Update the navigation property sites in sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param site_id1: key: id of site.
    :type site_id1: str
    :param body: New navigation property values.
    :type body: ~sites.models.MicrosoftGraphSite
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Expand the metadata URL template with both escaped path keys.
    url = self._client.format_url(
        self.update_sites.metadata['url'],  # type: ignore
        **{
            'site-id': self._serialize.url("site_id", site_id, 'str'),
            'site-id1': self._serialize.url("site_id1", site_id1, 'str'),
        }
    )

    query_parameters = {}  # type: Dict[str, Any]

    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    body_content_kwargs = {
        'content': self._serialize.body(body, 'MicrosoftGraphSite'),
    }  # type: Dict[str, Any]
    request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # A successful PATCH returns 204 No Content; anything else is an error.
    if response.status_code not in [204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
update_sites.metadata = {'url': '/sites/{site-id}/sites/{site-id1}'}  # type: ignore
def delete_sites(
    self,
    site_id,  # type: str
    site_id1,  # type: str
    if_match=None,  # type: Optional[str]
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Delete navigation property sites for sites.

    :param site_id: key: id of site.
    :type site_id: str
    :param site_id1: key: id of site.
    :type site_id1: str
    :param if_match: ETag.
    :type if_match: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Expand the metadata URL template with both escaped path keys.
    url = self._client.format_url(
        self.delete_sites.metadata['url'],  # type: ignore
        **{
            'site-id': self._serialize.url("site_id", site_id, 'str'),
            'site-id1': self._serialize.url("site_id1", site_id1, 'str'),
        }
    )

    query_parameters = {}  # type: Dict[str, Any]

    # Send the optional ETag as an If-Match precondition.
    header_parameters = {}  # type: Dict[str, Any]
    if if_match is not None:
        header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.delete(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # A successful DELETE returns 204 No Content; anything else is an error.
    if response.status_code not in [204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
delete_sites.metadata = {'url': '/sites/{site-id}/sites/{site-id1}'}  # type: ignore
def add(
    self,
    body,  # type: "models.PathsV2U0Z1SitesMicrosoftGraphAddPostRequestbodyContentApplicationJsonSchema"
    **kwargs  # type: Any
):
    # type: (...) -> List["models.MicrosoftGraphSite"]
    """Invoke action add.

    :param body: Action parameters.
    :type body: ~sites.models.PathsV2U0Z1SitesMicrosoftGraphAddPostRequestbodyContentApplicationJsonSchema
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: list of MicrosoftGraphSite, or the result of cls(response)
    :rtype: list[~sites.models.MicrosoftGraphSite]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[List["models.MicrosoftGraphSite"]]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Action URL is fixed: no path parameters to substitute.
    url = self.add.metadata['url']  # type: ignore
    query_parameters = {}  # type: Dict[str, Any]

    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    body_content_kwargs = {
        'content': self._serialize.body(body, 'PathsV2U0Z1SitesMicrosoftGraphAddPostRequestbodyContentApplicationJsonSchema'),
    }  # type: Dict[str, Any]
    request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('[MicrosoftGraphSite]', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
add.metadata = {'url': '/sites/microsoft.graph.add'}  # type: ignore
def delta(
    self,
    **kwargs  # type: Any
):
    # type: (...) -> List["models.MicrosoftGraphSite"]
    """Invoke function delta.

    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: list of MicrosoftGraphSite, or the result of cls(response)
    :rtype: list[~sites.models.MicrosoftGraphSite]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[List["models.MicrosoftGraphSite"]]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Function URL is fixed: no path or query parameters.
    url = self.delta.metadata['url']  # type: ignore
    query_parameters = {}  # type: Dict[str, Any]
    header_parameters = {
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('[MicrosoftGraphSite]', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
delta.metadata = {'url': '/sites/microsoft.graph.delta()'}  # type: ignore
def remove(
    self,
    body,  # type: "models.Paths8Behs0SitesMicrosoftGraphRemovePostRequestbodyContentApplicationJsonSchema"
    **kwargs  # type: Any
):
    # type: (...) -> List["models.MicrosoftGraphSite"]
    """Invoke action remove.

    :param body: Action parameters.
    :type body: ~sites.models.Paths8Behs0SitesMicrosoftGraphRemovePostRequestbodyContentApplicationJsonSchema
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: list of MicrosoftGraphSite, or the result of cls(response)
    :rtype: list[~sites.models.MicrosoftGraphSite]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[List["models.MicrosoftGraphSite"]]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Action URL is fixed: no path parameters to substitute.
    url = self.remove.metadata['url']  # type: ignore
    query_parameters = {}  # type: Dict[str, Any]

    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    body_content_kwargs = {
        'content': self._serialize.body(body, 'Paths8Behs0SitesMicrosoftGraphRemovePostRequestbodyContentApplicationJsonSchema'),
    }  # type: Dict[str, Any]
    request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('[MicrosoftGraphSite]', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
remove.metadata = {'url': '/sites/microsoft.graph.remove'}  # type: ignore
| 44.835889
| 214
| 0.633196
| 14,081
| 128,679
| 5.605781
| 0.021305
| 0.020751
| 0.013555
| 0.0188
| 0.946602
| 0.943523
| 0.929904
| 0.921302
| 0.903376
| 0.897675
| 0
| 0.007825
| 0.255185
| 128,679
| 2,869
| 215
| 44.851516
| 0.81577
| 0.270565
| 0
| 0.850296
| 0
| 0
| 0.092658
| 0.021655
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036095
| false
| 0
| 0.005325
| 0
| 0.089941
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8c5ef186939dc52a723f26a4f2acb470a4beb51c
| 159
|
py
|
Python
|
cashaccount/__init__.py
|
YumenoG/pycashaccount
|
f2a22eec729cc7b608241e1632d2ccde5fcc3bbc
|
[
"MIT"
] | 2
|
2019-02-20T12:28:19.000Z
|
2019-02-20T12:28:22.000Z
|
cashaccount/__init__.py
|
YumenoG/pycashaccount
|
f2a22eec729cc7b608241e1632d2ccde5fcc3bbc
|
[
"MIT"
] | 5
|
2019-01-03T19:35:17.000Z
|
2019-02-20T12:34:11.000Z
|
cashaccount/__init__.py
|
YumenoG/pycashaccount
|
f2a22eec729cc7b608241e1632d2ccde5fcc3bbc
|
[
"MIT"
] | 1
|
2019-10-16T11:30:33.000Z
|
2019-10-16T11:30:33.000Z
|
from .payment import KeyHashInfo, ScriptHashInfo, PaymentCodeInfo
from .registration import Registration
from .registration import electron_markdown, opreturn
| 39.75
| 65
| 0.867925
| 16
| 159
| 8.5625
| 0.625
| 0.233577
| 0.321168
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09434
| 159
| 3
| 66
| 53
| 0.951389
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8b04120fbbfb9dd238177f79cf95478afc695b61
| 44,106
|
py
|
Python
|
venv/lib/python3.8/site-packages/azureml/_restclient/operations/run_operations.py
|
amcclead7336/Enterprise_Data_Science_Final
|
ccdc0aa08d4726bf82d71c11a1cc0c63eb301a28
|
[
"Unlicense",
"MIT"
] | null | null | null |
venv/lib/python3.8/site-packages/azureml/_restclient/operations/run_operations.py
|
amcclead7336/Enterprise_Data_Science_Final
|
ccdc0aa08d4726bf82d71c11a1cc0c63eb301a28
|
[
"Unlicense",
"MIT"
] | null | null | null |
venv/lib/python3.8/site-packages/azureml/_restclient/operations/run_operations.py
|
amcclead7336/Enterprise_Data_Science_Final
|
ccdc0aa08d4726bf82d71c11a1cc0c63eb301a28
|
[
"Unlicense",
"MIT"
] | 2
|
2021-05-23T16:46:31.000Z
|
2021-05-26T23:51:09.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator 2.3.33.0
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from .. import models
class RunOperations(object):
    """RunOperations operations.

    Auto-generated operations group for run-history REST calls; holds the
    pipeline client and the (de)serializers every operation method uses.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    # Expose the generated models module on the class for convenient access.
    models = models

    def __init__(self, client, config, serializer, deserializer):
        # Pipeline client used to build and send HTTP requests.
        self._client = client
        # Serializer for URL/query/header/body parameters.
        self._serialize = serializer
        # Deserializer for response payloads.
        self._deserialize = deserializer
        # NOTE(review): config is public while the others are private —
        # this mirrors the generator's convention; callers may rely on it.
        self.config = config
def get_child(
        self, subscription_id, resource_group_name, workspace_name, experiment_name, run_id, filter=None, continuation_token=None, orderby=None, sortorder=None, top=None, custom_headers=None, raw=False, **operation_config):
    """
    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param experiment_name:
    :type experiment_name: str
    :param run_id:
    :type run_id: str
    :param filter:
    :type filter: str
    :param continuation_token:
    :type continuation_token: str
    :param orderby:
    :type orderby: list[str]
    :param sortorder: Possible values include: 'Asc', 'Desc'
    :type sortorder: str
    :param top:
    :type top: int
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: PaginatedRunDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.PaginatedRunDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Expand the URL template with the escaped path parameters.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'experimentName': self._serialize.url("experiment_name", experiment_name, 'str'),
        'runId': self._serialize.url("run_id", run_id, 'str')
    }
    url = self._client.format_url(self.get_child.metadata['url'], **path_args)

    # Only include the OData options the caller supplied.
    query_parameters = {}
    if filter is not None:
        query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
    if continuation_token is not None:
        query_parameters['$continuationToken'] = self._serialize.query("continuation_token", continuation_token, 'str')
    if orderby is not None:
        query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
    if sortorder is not None:
        query_parameters['$sortorder'] = self._serialize.query("sortorder", sortorder, 'str')
    if top is not None:
        query_parameters['$top'] = self._serialize.query("top", top, 'int')

    # Caller-supplied headers may override the default Content-Type.
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, stream=False, **operation_config)

    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('PaginatedRunDto', response)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
get_child.metadata = {'url': '/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/children'}
def get_token(
        self, subscription_id, resource_group_name, workspace_name, experiment_name, run_id, custom_headers=None, raw=False, **operation_config):
    """
    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param experiment_name:
    :type experiment_name: str
    :param run_id:
    :type run_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: TokenResult or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.TokenResult or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Expand the URL template with the escaped path parameters.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'experimentName': self._serialize.url("experiment_name", experiment_name, 'str'),
        'runId': self._serialize.url("run_id", run_id, 'str')
    }
    url = self._client.format_url(self.get_token.metadata['url'], **path_args)

    query_parameters = {}

    # Caller-supplied headers may override the default Content-Type.
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, stream=False, **operation_config)

    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('TokenResult', response)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
get_token.metadata = {'url': '/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/token'}
def get_details(
        self, subscription_id, resource_group_name, workspace_name, experiment_name, run_id, custom_headers=None, raw=False, **operation_config):
    """
    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param experiment_name:
    :type experiment_name: str
    :param run_id:
    :type run_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: RunDetailsDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.RunDetailsDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Expand the URL template with the escaped path parameters.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'experimentName': self._serialize.url("experiment_name", experiment_name, 'str'),
        'runId': self._serialize.url("run_id", run_id, 'str')
    }
    url = self._client.format_url(self.get_details.metadata['url'], **path_args)

    query_parameters = {}

    # Caller-supplied headers may override the default Content-Type.
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, stream=False, **operation_config)

    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('RunDetailsDto', response)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
get_details.metadata = {'url': '/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/details'}
def get(
        self, subscription_id, resource_group_name, workspace_name, experiment_name, run_id, custom_headers=None, raw=False, **operation_config):
    """
    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param experiment_name:
    :type experiment_name: str
    :param run_id:
    :type run_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: RunDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.RunDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Expand the URL template with the escaped path parameters.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'experimentName': self._serialize.url("experiment_name", experiment_name, 'str'),
        'runId': self._serialize.url("run_id", run_id, 'str')
    }
    url = self._client.format_url(self.get.metadata['url'], **path_args)

    query_parameters = {}

    # Caller-supplied headers may override the default Content-Type.
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, stream=False, **operation_config)

    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('RunDto', response)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
get.metadata = {'url': '/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}'}
def patch(
        self, subscription_id, resource_group_name, workspace_name, experiment_name, run_id, create_run_dto=None, custom_headers=None, raw=False, **operation_config):
    """
    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param experiment_name:
    :type experiment_name: str
    :param run_id:
    :type run_id: str
    :param create_run_dto:
    :type create_run_dto: ~_restclient.models.CreateRunDto
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: RunDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.RunDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Expand the URL template with the escaped path parameters.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'experimentName': self._serialize.url("experiment_name", experiment_name, 'str'),
        'runId': self._serialize.url("run_id", run_id, 'str')
    }
    url = self._client.format_url(self.patch.metadata['url'], **path_args)

    query_parameters = {}

    # JSON-Patch content type; caller-supplied headers may override it.
    header_parameters = {'Content-Type': 'application/json-patch+json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # An omitted DTO is sent as an empty body, matching the original behavior.
    if create_run_dto is not None:
        body_content = self._serialize.body(create_run_dto, 'CreateRunDto')
    else:
        body_content = None

    request = self._client.patch(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, stream=False, **operation_config)

    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('RunDto', response)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
patch.metadata = {'url': '/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}'}
def get_by_exp_id(
        self, subscription_id, resource_group_name, workspace_name, experiment_id, run_id, custom_headers=None, raw=False, **operation_config):
    """Fetch a single run, addressing its experiment by id rather than by name.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param experiment_id:
    :type experiment_id: str
    :param run_id:
    :type run_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: RunDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.RunDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Serialize each path segment and fill the URL template.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'experimentId': self._serialize.url("experiment_id", experiment_id, 'str'),
        'runId': self._serialize.url("run_id", run_id, 'str'),
    }
    target_url = self._client.format_url(self.get_by_exp_id.metadata['url'], **path_args)

    # No query string for this operation; headers carry the content type
    # plus any caller-supplied extras.
    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET and surface non-200 responses as the service exception.
    req = self._client.get(target_url, {})
    resp = self._client.send(req, headers, stream=False, **operation_config)
    if resp.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, resp)

    result = self._deserialize('RunDto', resp) if resp.status_code == 200 else None
    if raw:
        return ClientRawResponse(result, resp)
    return result
get_by_exp_id.metadata = {'url': '/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}'}
def patch_by_exp_id(
        self, subscription_id, resource_group_name, workspace_name, experiment_id, run_id, create_run_dto=None, custom_headers=None, raw=False, **operation_config):
    """Patch a run, addressing its experiment by id rather than by name.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param experiment_id:
    :type experiment_id: str
    :param run_id:
    :type run_id: str
    :param create_run_dto:
    :type create_run_dto: ~_restclient.models.CreateRunDto
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: RunDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.RunDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Serialized path parameters for the URL template.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'experimentId': self._serialize.url("experiment_id", experiment_id, 'str'),
        'runId': self._serialize.url("run_id", run_id, 'str'),
    }
    target_url = self._client.format_url(self.patch_by_exp_id.metadata['url'], **path_args)

    # JSON-patch content type; callers may append extra headers.
    headers = {'Content-Type': 'application/json-patch+json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # Serialize the optional payload, then issue the PATCH.
    body = None if create_run_dto is None else self._serialize.body(create_run_dto, 'CreateRunDto')
    req = self._client.patch(target_url, {})
    resp = self._client.send(req, headers, body, stream=False, **operation_config)
    if resp.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, resp)

    result = self._deserialize('RunDto', resp) if resp.status_code == 200 else None
    if raw:
        return ClientRawResponse(result, resp)
    return result
patch_by_exp_id.metadata = {'url': '/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}'}
def batch_add_or_modify(
        self, subscription_id, resource_group_name, workspace_name, experiment_name, request_dto, custom_headers=None, raw=False, **operation_config):
    """Add or modify a batch of runs within an experiment in one request.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param experiment_name:
    :type experiment_name: str
    :param request_dto:
    :type request_dto: ~_restclient.models.BatchAddOrModifyRunRequestDto
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: BatchAddOrModifyRunResultDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.BatchAddOrModifyRunResultDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Serialized path parameters for the URL template.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'experimentName': self._serialize.url("experiment_name", experiment_name, 'str'),
    }
    target_url = self._client.format_url(self.batch_add_or_modify.metadata['url'], **path_args)

    # JSON-patch content type; callers may append extra headers.
    headers = {'Content-Type': 'application/json-patch+json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # The request DTO is mandatory here, unlike the single-run operations.
    body = self._serialize.body(request_dto, 'BatchAddOrModifyRunRequestDto')
    req = self._client.patch(target_url, {})
    resp = self._client.send(req, headers, body, stream=False, **operation_config)
    if resp.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, resp)

    result = self._deserialize('BatchAddOrModifyRunResultDto', resp) if resp.status_code == 200 else None
    if raw:
        return ClientRawResponse(result, resp)
    return result
batch_add_or_modify.metadata = {'url': '/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/batch/runs'}
def get_by_query(
        self, subscription_id, resource_group_name, workspace_name, experiment_name, query_params=None, custom_headers=None, raw=False, **operation_config):
    """Query runs in an experiment, POSTing an optional filter document.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param experiment_name:
    :type experiment_name: str
    :param query_params:
    :type query_params: ~_restclient.models.QueryParamsDto
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: PaginatedRunDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.PaginatedRunDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Serialized path parameters for the URL template.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'experimentName': self._serialize.url("experiment_name", experiment_name, 'str'),
    }
    target_url = self._client.format_url(self.get_by_query.metadata['url'], **path_args)

    # Content type as emitted by the code generator; callers may add headers.
    headers = {'Content-Type': 'application/json-patch+json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # Serialize the optional query document, then POST it.
    body = None if query_params is None else self._serialize.body(query_params, 'QueryParamsDto')
    req = self._client.post(target_url, {})
    resp = self._client.send(req, headers, body, stream=False, **operation_config)
    if resp.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, resp)

    result = self._deserialize('PaginatedRunDto', resp) if resp.status_code == 200 else None
    if raw:
        return ClientRawResponse(result, resp)
    return result
get_by_query.metadata = {'url': '/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs:query'}
def list_by_compute(
        self, subscription_id, resource_group_name, workspace_name, compute_name, filter=None, continuation_token=None, top=None, custom_headers=None, raw=False, **operation_config):
    """List runs executed on a given compute target, with paging support.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param compute_name:
    :type compute_name: str
    :param filter:
    :type filter: str
    :param continuation_token:
    :type continuation_token: str
    :param top:
    :type top: int
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: PaginatedRunDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.PaginatedRunDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Serialized path parameters for the URL template.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'computeName': self._serialize.url("compute_name", compute_name, 'str'),
    }
    target_url = self._client.format_url(self.list_by_compute.metadata['url'], **path_args)

    # Optional OData-style query parameters; omitted entirely when None.
    query = {}
    if filter is not None:
        query['$filter'] = self._serialize.query("filter", filter, 'str')
    if continuation_token is not None:
        query['$continuationToken'] = self._serialize.query("continuation_token", continuation_token, 'str')
    if top is not None:
        query['$top'] = self._serialize.query("top", top, 'int')

    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET and surface non-200 responses as the service exception.
    req = self._client.get(target_url, query)
    resp = self._client.send(req, headers, stream=False, **operation_config)
    if resp.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, resp)

    result = self._deserialize('PaginatedRunDto', resp) if resp.status_code == 200 else None
    if raw:
        return ClientRawResponse(result, resp)
    return result
list_by_compute.metadata = {'url': '/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/runs'}
def get_counts(
        self, subscription_id, resource_group_name, workspace_name, experiment_name, filter=None, continuation_token=None, orderby=None, sortorder=None, top=None, custom_headers=None, raw=False, **operation_config):
    """Retrieve run counts for an experiment, optionally filtered and sorted.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param experiment_name:
    :type experiment_name: str
    :param filter:
    :type filter: str
    :param continuation_token:
    :type continuation_token: str
    :param orderby:
    :type orderby: list[str]
    :param sortorder: Possible values include: 'Asc', 'Desc'
    :type sortorder: str
    :param top:
    :type top: int
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: RunCountsDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.RunCountsDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Serialized path parameters for the URL template.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'experimentName': self._serialize.url("experiment_name", experiment_name, 'str'),
    }
    target_url = self._client.format_url(self.get_counts.metadata['url'], **path_args)

    # Optional OData-style query parameters; $orderby joins its list with ','.
    query = {}
    if filter is not None:
        query['$filter'] = self._serialize.query("filter", filter, 'str')
    if continuation_token is not None:
        query['$continuationToken'] = self._serialize.query("continuation_token", continuation_token, 'str')
    if orderby is not None:
        query['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
    if sortorder is not None:
        query['$sortorder'] = self._serialize.query("sortorder", sortorder, 'str')
    if top is not None:
        query['$top'] = self._serialize.query("top", top, 'int')

    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET and surface non-200 responses as the service exception.
    req = self._client.get(target_url, query)
    resp = self._client.send(req, headers, stream=False, **operation_config)
    if resp.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, resp)

    result = self._deserialize('RunCountsDto', resp) if resp.status_code == 200 else None
    if raw:
        return ClientRawResponse(result, resp)
    return result
get_counts.metadata = {'url': '/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runcounts'}
def delete_tags(
        self, subscription_id, resource_group_name, workspace_name, experiment_name, run_id, tags, custom_headers=None, raw=False, **operation_config):
    """Remove the named tags from a run; the tag list travels in the DELETE body.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param experiment_name:
    :type experiment_name: str
    :param run_id:
    :type run_id: str
    :param tags:
    :type tags: list[str]
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: RunDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.RunDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Serialized path parameters for the URL template.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'experimentName': self._serialize.url("experiment_name", experiment_name, 'str'),
        'runId': self._serialize.url("run_id", run_id, 'str'),
    }
    target_url = self._client.format_url(self.delete_tags.metadata['url'], **path_args)

    # JSON-patch content type; callers may append extra headers.
    headers = {'Content-Type': 'application/json-patch+json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # Serialize the tag list and send it as the DELETE payload.
    body = self._serialize.body(tags, '[str]')
    req = self._client.delete(target_url, {})
    resp = self._client.send(req, headers, body, stream=False, **operation_config)
    if resp.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, resp)

    result = self._deserialize('RunDto', resp) if resp.status_code == 200 else None
    if raw:
        return ClientRawResponse(result, resp)
    return result
delete_tags.metadata = {'url': '/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/tags'}
def get_workspace_run(
        self, subscription_id, resource_group_name, workspace_name, run_id, custom_headers=None, raw=False, **operation_config):
    """Gets the specified run within the specified workspace.

    Unlike :meth:`get_by_exp_id`, this lookup does not require the
    experiment — only the run id within the workspace.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param run_id:
    :type run_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: RunDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.RunDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Serialized path parameters for the URL template.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'runId': self._serialize.url("run_id", run_id, 'str'),
    }
    target_url = self._client.format_url(self.get_workspace_run.metadata['url'], **path_args)

    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET and surface non-200 responses as the service exception.
    req = self._client.get(target_url, {})
    resp = self._client.send(req, headers, stream=False, **operation_config)
    if resp.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, resp)

    result = self._deserialize('RunDto', resp) if resp.status_code == 200 else None
    if raw:
        return ClientRawResponse(result, resp)
    return result
get_workspace_run.metadata = {'url': '/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}'}
| 46.039666
| 239
| 0.657121
| 4,502
| 44,106
| 6.217903
| 0.043314
| 0.036688
| 0.039474
| 0.012074
| 0.94788
| 0.945736
| 0.943486
| 0.942057
| 0.92716
| 0.92716
| 0
| 0.003722
| 0.25076
| 44,106
| 957
| 240
| 46.087774
| 0.84337
| 0.305174
| 0
| 0.801453
| 0
| 0.031477
| 0.202779
| 0.098963
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033898
| false
| 0
| 0.004843
| 0
| 0.106538
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8b193a7cb99a37671658cdc6e43a0663925262d0
| 113
|
py
|
Python
|
LnkParse3/target/printers.py
|
ernix/LnkParse3
|
ab8b2c796a501b103eb74142762e7fe9f4f1960a
|
[
"MIT"
] | null | null | null |
LnkParse3/target/printers.py
|
ernix/LnkParse3
|
ab8b2c796a501b103eb74142762e7fe9f4f1960a
|
[
"MIT"
] | null | null | null |
LnkParse3/target/printers.py
|
ernix/LnkParse3
|
ab8b2c796a501b103eb74142762e7fe9f4f1960a
|
[
"MIT"
] | null | null | null |
from LnkParse3.target.lnk_target_base import LnkTargetBase
class Printers(LnkTargetBase):
    """Stub target for printer shell items.

    Inherits all behavior from :class:`LnkTargetBase`.
    TODO: add printer-specific parsing when the format is implemented.
    """
| 16.142857
| 58
| 0.769912
| 13
| 113
| 6.538462
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010638
| 0.168142
| 113
| 6
| 59
| 18.833333
| 0.893617
| 0.044248
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
8b2d02959ac26136a5b5ef98911ad974c5416e52
| 112
|
py
|
Python
|
snapx/snapx/algorithms/shortest_paths/__init__.py
|
ruth-ann/snap-python
|
fe98de7b5697b3d60eb3497893e24801ae1916f9
|
[
"BSD-3-Clause"
] | 242
|
2015-01-01T08:40:28.000Z
|
2022-03-18T05:22:09.000Z
|
snapx/snapx/algorithms/shortest_paths/__init__.py
|
ruth-ann/snap-python
|
fe98de7b5697b3d60eb3497893e24801ae1916f9
|
[
"BSD-3-Clause"
] | 99
|
2015-01-24T07:55:27.000Z
|
2021-10-30T18:20:13.000Z
|
snapx/snapx/algorithms/shortest_paths/__init__.py
|
ruth-ann/snap-python
|
fe98de7b5697b3d60eb3497893e24801ae1916f9
|
[
"BSD-3-Clause"
] | 105
|
2015-03-03T06:45:17.000Z
|
2022-02-24T15:52:40.000Z
|
from snapx.algorithms.shortest_paths.weighted import *
from snapx.algorithms.shortest_paths.unweighted import *
| 37.333333
| 56
| 0.857143
| 14
| 112
| 6.714286
| 0.571429
| 0.191489
| 0.404255
| 0.574468
| 0.680851
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 112
| 2
| 57
| 56
| 0.903846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8c7560f03dbb3526ccb30d3de812d6405f5d7597
| 173,875
|
py
|
Python
|
CEDA/macroecon/eu.py
|
TerenceLiu98/CEDApy
|
14f21f6a8e83bd62afbf9be1465000db3c6b6dce
|
[
"MIT"
] | null | null | null |
CEDA/macroecon/eu.py
|
TerenceLiu98/CEDApy
|
14f21f6a8e83bd62afbf9be1465000db3c6b6dce
|
[
"MIT"
] | null | null | null |
CEDA/macroecon/eu.py
|
TerenceLiu98/CEDApy
|
14f21f6a8e83bd62afbf9be1465000db3c6b6dce
|
[
"MIT"
] | null | null | null |
import io
import os
import demjson
import requests
import numpy as np
import pandas as pd
from fake_useragent import UserAgent
from pandas.core.frame import DataFrame
from pandas.core.reshape.merge import merge
# Main Economic Indicators: https://alfred.stlouisfed.org/release?rid=205
# Base endpoints for each upstream data provider; the fetch functions in
# this module append their own query strings to these prefixes.
url = {
    "fred_econ": "https://fred.stlouisfed.org/graph/fredgraph.csv?",
    "eurostat": "http://ec.europa.eu/eurostat/wdds/rest/data/v2.1/json/en/",
    "ecb": "https://sdw-wsrest.ecb.europa.eu/service/data/",
    "OECD": "https://stats.oecd.org/sdmx-json/data/DP_LIVE/"
}
def merge_data(data_1: pd.DataFrame, data_2: pd.DataFrame, col_name: str):
    """Align two frames on *col_name* via an as-of (nearest-key) merge.

    Both inputs must already be sorted by *col_name*, as required by
    :func:`pandas.merge_asof`.
    """
    merged = pd.merge_asof(data_1, data_2, on=col_name)
    return merged
def National_Account():
    """Download euro-area quarterly national-accounts series from FRED.

    Fetches a pre-built fredgraph CSV covering twelve GDP-by-expenditure
    series (current and constant prices) plus the GDP deflator, parses it
    into a DataFrame, and returns ``(df, name_list, description)`` where
    ``name_list`` maps each FRED series id to a human-readable label.

    NOTE(review): the series window is pinned to fixed vintage/revision
    dates (2021-06-07) baked into the URL — refreshing the data requires
    regenerating this query string.
    """
    tmp_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=1168&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=NAEXCP04EZQ189S,NAEXCP02EZQ189S,NAEXCP01EZQ189S,NAEXCP06EZQ189S,NAEXCP07EZQ189S,NAEXCP03EZQ189S,NAGIGP01EZQ661S,NAEXKP06EZQ659S,NAEXKP04EZQ659S,NAEXKP01EZQ652S,NAEXKP07EZQ652S,NAEXKP03EZQ659S&scale=left,left,left,left,left,left,left,left,left,left,left,left&cosd=1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1996-01-01,1996-01-01,1995-01-01,1995-01-01,1996-01-01&coed=2020-10-01,2020-10-01,2020-10-01,2020-10-01,2020-10-01,2020-10-01,2020-10-01,2020-10-01,2020-10-01,2021-01-01,2020-10-01,2020-10-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae,%23db843d,%2392a8cd,%23a47d7c,%23b5ca92,%2391e8e1,%238d4653,%238085e8&link_values=false,false,false,false,false,false,false,false,false,false,false,false&line_style=solid,solid,solid,solid,solid,solid,solid,solid,solid,solid,solid,solid&mark_type=none,none,none,none,none,none,none,none,none,none,none,none&mw=3,3,3,3,3,3,3,3,3,3,3,3&lw=2,2,2,2,2,2,2,2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999,99999,99999,99999,99999,99999,99999,99999&mma=0,0,0,0,0,0,0,0,0,0,0,0&fml=a,a,a,a,a,a,a,a,a,a,a,a&fq=Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly&fam=avg,avg,avg,avg,avg,avg,avg,avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin,lin,lin,lin,lin,lin,lin,lin&fgsnd=2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3,4,5,6,7,8,9,10,11,12&transformation=lin,lin,lin,lin,lin,lin,lin,lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1996-01-01,1996-01-01,1995-01-01,1995-01-01,1996-01-01"
    # Randomized User-Agent to avoid blanket blocking of scripted clients.
    ua = UserAgent(verify_ssl=False)
    request_header = {"User-Agent": ua.random}
    r = requests.get(tmp_url, headers=request_header)
    data_text = r.content
    # The endpoint returns CSV bytes; decode and parse into a DataFrame.
    df = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    #df = df[list(df.columns[1:])].replace(".", np.nan).astype(float)
    # Map FRED series ids (the CSV column names) to descriptive labels.
    name_list = {
        'NAEXCP04EZQ189S': "Gross Domestic Product by Expenditure in Current Prices: Gross Fixed Capital Formation for the Euro Area",
        'NAEXCP02EZQ189S': "Gross Domestic Product by Expenditure in Current Prices: Private Final Consumption Expenditure for the Euro Area",
        'NAEXCP01EZQ189S': "Gross Domestic Product by Expenditure in Current Prices: Total Gross Domestic Product for the Euro Area",
        'NAEXCP06EZQ189S': "Gross Domestic Product by Expenditure in Current Prices: Exports of Goods and Services for the Euro Area",
        'NAEXCP07EZQ189S': "Gross Domestic Product by Expenditure in Current Prices: Less Imports of Goods and Services for the Euro Area",
        'NAEXCP03EZQ189S': "Gross Domestic Product by Expenditure in Current Prices: Government Final Consumption Expenditure for the Euro Area",
        'NAGIGP01EZQ661S': "Gross Domestic Product Deflator for the Euro Area",
        'NAEXKP06EZQ659S': "Gross Domestic Product by Expenditure in Constant Prices: Exports of Goods and Services for the Euro Area",
        'NAEXKP04EZQ659S': "Gross Domestic Product by Expenditure in Constant Prices: Gross Fixed Capital Formation for the Euro Area",
        'NAEXKP01EZQ652S': "Gross Domestic Product by Expenditure in Constant Prices: Total Gross Domestic Product for the Euro Area",
        'NAEXKP07EZQ652S': "Gross Domestic Product by Expenditure in Constant Prices: Less: Imports of Goods and Services for the Euro Area",
        'NAEXKP03EZQ659S': "Gross Domestic Product by Expenditure in Constant Prices: Government Final Consumption Expenditure for the Euro Area"}
    description = "National Accounts, Quarterly, Seasonally, Adjusted"
    return df, name_list, description
def International_Trade():
    """Download euro-area quarterly international-trade series from FRED.

    Fetches a pre-built fredgraph CSV with three series (export value,
    import value, net trade), parses it into a DataFrame, and returns
    ``(df, name_list, description)`` where ``name_list`` maps each FRED
    series id — the CSV column names — to a human-readable label.
    """
    tmp_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=1168&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=XTEXVA01EZQ188S,XTIMVA01EZQ188S,EA19XTNTVA01STSAQ&scale=left,left,left&cosd=1995-01-01,1995-01-01,1995-01-01&coed=2020-10-01,2020-10-01,2017-04-01&line_color=%234572a7,%23aa4643,%2389a54e&link_values=false,false,false&line_style=solid,solid,solid&mark_type=none,none,none&mw=3,3,3&lw=2,2,2&ost=-99999,-99999,-99999&oet=99999,99999,99999&mma=0,0,0&fml=a,a,a&fq=Quarterly,Quarterly,Quarterly&fam=avg,avg,avg&fgst=lin,lin,lin&fgsnd=2020-02-01,2020-02-01,2017-04-01&line_index=1,2,3&transformation=lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07&nd=1995-01-01,1995-01-01,1995-01-01"
    # Randomized User-Agent to avoid blanket blocking of scripted clients.
    ua = UserAgent(verify_ssl=False)
    request_header = {"User-Agent": ua.random}
    r = requests.get(tmp_url, headers=request_header)
    data_text = r.content
    df = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    #df = df[list(df.columns[1:])].replace(".", np.nan).astype(float)
    name_list = {
        'XTEXVA01EZQ188S': "Exports: Value Goods for the Euro Area",
        # BUG FIX: key previously had an extra trailing "S"
        # ('XTIMVA01EZQ188SS') and so never matched the series id
        # requested in the URL / returned as a CSV column.
        'XTIMVA01EZQ188S': "Imports: Value Goods for the Euro Area",
        'EA19XTNTVA01STSAQ': "International Trade: Net trade: Value (goods): Total for the Euro Area"}
    description = "International Trade, Quarterly, Seasonally Adjusted"
    return df, name_list, description
def Balance_of_Payments_BPM6():
    """
    Fetch euro-area Balance of Payments (BPM6) current-account series from
    the FRED graph CSV endpoint.

    Returns
    -------
    tuple
        (df, name_list, description): ``df`` has a parsed ``DATE`` column
        plus one column per FRED series id; ``name_list`` maps each series
        id to a human-readable title; ``description`` labels the dataset.
    """
    tmp_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=1168&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=EA19B6BLTT02STSAQ,EA19B6DBSE02STSAQ,EA19B6DBSE03STSAQ,EA19B6CRSE03STSAQ,EA19B6CRSE02STSAQ&scale=left,left,left,left,left&cosd=1999-01-01,1999-01-01,1999-01-01,1999-01-01,1999-01-01&coed=2020-10-01,2020-10-01,2020-10-01,2020-10-01,2020-10-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae&link_values=false,false,false,false,false&line_style=solid,solid,solid,solid,solid&mark_type=none,none,none,none,none&mw=3,3,3,3,3&lw=2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999&mma=0,0,0,0,0&fml=a,a,a,a,a&fq=Quarterly,Quarterly,Quarterly,Quarterly,Quarterly&fam=avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin&fgsnd=2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3,4,5&transformation=lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1999-01-01,1999-01-01,1999-01-01,1999-01-01,1999-01-01"
    ua = UserAgent(verify_ssl=False)
    request_header = {"User-Agent": ua.random}
    r = requests.get(tmp_url, headers=request_header)
    data_text = r.content
    df = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    #df = df[list(df.columns[1:])].replace(".", np.nan).astype(float)
    name_list = {
        # Fixed: this entry previously duplicated the EA19B6DBSE02STSAQ
        # title (a Services-Debits breakdown) although B6BLTT02 is the
        # total current-account balance series.
        # NOTE(review): corrected title follows the OECD MEI B6BLTT02
        # naming — confirm against the FRED series page.
        'EA19B6BLTT02STSAQ': "Balance of payments BPM6: Current account: Total: Balance as percentage of GDP for the Euro Area",
        'EA19B6DBSE02STSAQ': "Balance of payments BPM6: Current account Debits: Services: Total Debits as % of Current account for the Euro Area",
        'EA19B6DBSE03STSAQ': "Balance of payments BPM6: Current account Debits: Services: Total Debits as % of Goods and Services for the Euro Area",
        'EA19B6CRSE03STSAQ': "Balance of payments BPM6: Current account Credits: Services: Total Credits as % of Goods and Services for Euro Area",
        'EA19B6CRSE02STSAQ': "Balance of payments BPM6: Current account Credits: Services: Total Credits as % of Current account for Euro Area"}
    # Fixed typo: was "Balanced of payments BPM6, ...".
    description = "Balance of payments BPM6, Quarterly, Seasonally Adjusted"
    return df, name_list, description
def Leading_Indicators_OECD(startdate = "1950-01", enddate = "2021-05"):
    """
    Fetch the OECD composite leading indicator (CLI), business confidence
    indicator (BCI) and consumer confidence indicator (CCI) for the euro
    area (EA19) and merge them on date.

    Parameters
    ----------
    startdate, enddate : str
        Inclusive period bounds in ``YYYY-MM`` form, passed through to the
        OECD API as ``startPeriod`` / ``endPeriod``.

    Returns
    -------
    pandas.DataFrame
        Columns ``Date``, ``EU_OECD_CLI``, ``EU_OECD_BCI``, ``EU_OECD_CCI``.
    """
    def _fetch(series_path, column):
        # One OECD CSV download, reduced to a (Date, <column>) frame.
        # This replaces three byte-identical copies of the same stanza.
        tmp_url = url["OECD"] + series_path
        ua = UserAgent(verify_ssl=False)
        request_params = {
            "contentType": "csv",
            "detail": "code",
            "separator": "comma",
            "csv-lang": "en",
            "startPeriod": "{}".format(startdate),
            "endPeriod": "{}".format(enddate)
        }
        request_header = {"User-Agent": ua.random}
        r = requests.get(tmp_url, params = request_params, headers=request_header)
        data_text = r.content
        frame = pd.read_csv(io.StringIO(data_text.decode('utf-8')))[["TIME", "Value"]]
        frame.columns = ["Date", column]
        frame["Date"] = pd.to_datetime(frame["Date"], format = "%Y-%m")
        frame[column] = frame[column].astype(float)
        return frame
    df_cli = _fetch("EA19.CLI.AMPLITUD.LTRENDIDX.M/OECD", "EU_OECD_CLI")
    df_bci = _fetch("EA19.BCI.AMPLITUD.LTRENDIDX.M/OECD", "EU_OECD_BCI")
    df_cci = _fetch("EA19.CCI.AMPLITUD.LTRENDIDX.M/OECD", "EU_OECD_CCI")
    # merge_asof keeps the CLI dates as the spine, matching the original
    # pairwise as-of merges.
    df = pd.merge_asof(df_cli, df_bci, on = "Date")
    df = pd.merge_asof(df, df_cci, on = "Date")
    return df
def Monetary_Aggregates_Monthly_Adj():
    """
    Download euro-area monetary aggregates (M1, M3), monthly and
    seasonally adjusted, from the FRED graph CSV endpoint.

    Returns
    -------
    tuple
        (df, name_list, description): ``df`` has a parsed ``DATE`` column
        plus one column per FRED series id; ``name_list`` maps series ids
        to titles; ``description`` labels the dataset.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=1168&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=EA19MABMM301GYSAM,EA19MANMM101IXOBSAM&scale=left,left&cosd=1971-01-01,1970-01-01&coed=2021-03-01,2021-03-01&line_color=%234572a7,%23aa4643&link_values=false,false&line_style=solid,solid&mark_type=none,none&mw=3,3&lw=2,2&ost=-99999,-99999&oet=99999,99999&mma=0,0&fml=a,a&fq=Monthly,Monthly&fam=avg,avg&fgst=lin,lin&fgsnd=2020-02-01,2020-02-01&line_index=1,2&transformation=lin,lin&vintage_date=2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07&nd=1971-01-01,1970-01-01"
    headers = {"User-Agent": UserAgent(verify_ssl=False).random}
    response = requests.get(query_url, headers=headers)
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    # Numeric coercion intentionally left disabled, as in the original:
    # df = df[list(df.columns[1:])].replace(".", np.nan).astype(float)
    name_list = {'EA19MABMM301GYSAM': "Monetary aggregates and their components: Broad money and components: M3: M3 for the Euro Area",
                 'EA19MANMM101IXOBSAM': "Monetary aggregates and their components: Narrow money and components: M1 and components: M1 for the Euro Area"}
    description = "Monetary aggregates and their components, Monthly, Seasonally Adjusted"
    return df, name_list, description
def Monetary_Aggregates_Quarterly_Adj():
    """
    Download euro-area monetary aggregates (M1, M3), quarterly and
    seasonally adjusted, from the FRED graph CSV endpoint.

    Returns
    -------
    tuple
        (df, name_list, description): ``df`` has a parsed ``DATE`` column
        plus one column per FRED series id; ``name_list`` maps series ids
        to titles; ``description`` labels the dataset.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=1168&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=MABMM301EZQ189S,MANMM101EZQ189S&scale=left,left&cosd=1970-01-01,1970-01-01&coed=2021-01-01,2021-01-01&line_color=%234572a7,%23aa4643&link_values=false,false&line_style=solid,solid&mark_type=none,none&mw=3,3&lw=2,2&ost=-99999,-99999&oet=99999,99999&mma=0,0&fml=a,a&fq=Quarterly,Quarterly&fam=avg,avg&fgst=lin,lin&fgsnd=2020-02-01,2020-02-01&line_index=1,2&transformation=lin,lin&vintage_date=2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07&nd=1970-01-01,1970-01-01"
    agent = UserAgent(verify_ssl=False)
    response = requests.get(query_url, headers={"User-Agent": agent.random})
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    # Numeric coercion intentionally left disabled, as in the original:
    # df = df[list(df.columns[1:])].replace(".", np.nan).astype(float)
    name_list = {
        'MABMM301EZQ189S': "M3 for the Euro Area",
        'MANMM101EZQ189S': "M1 for the Euro Area"
    }
    description = "Monetary aggregates and their components, Quarterly, Seasonally Adjusted"
    return df, name_list, description
def Currency_Conversion_Quarterly():
    """
    Download euro-area currency-conversion and effective-exchange-rate
    series, quarterly, from the FRED graph CSV endpoint.

    Returns
    -------
    tuple
        (df, name_list, description): ``df`` has a parsed ``DATE`` column
        plus one column per FRED series id; ``name_list`` maps series ids
        to titles; ``description`` labels the dataset.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=1168&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=CCEUSP02EZQ655N,CCUSMA02EZQ618N,CCUSSP01EZQ650N,CCRETT02EZQ661N,CCRETT01EZQ661N&scale=left,left,left,left,left&cosd=1999-01-01,1979-01-01,1999-01-01,1970-01-01,1970-01-01&coed=2021-01-01,2021-01-01,2021-01-01,2021-01-01,2021-01-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae&link_values=false,false,false,false,false&line_style=solid,solid,solid,solid,solid&mark_type=none,none,none,none,none&mw=3,3,3,3,3&lw=2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999&mma=0,0,0,0,0&fml=a,a,a,a,a&fq=Quarterly,Quarterly,Quarterly,Quarterly,Quarterly&fam=avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin&fgsnd=2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3,4,5&transformation=lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1999-01-01,1979-01-01,1999-01-01,1970-01-01,1970-01-01"
    headers = {"User-Agent": UserAgent(verify_ssl=False).random}
    response = requests.get(query_url, headers=headers)
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    # Numeric coercion intentionally left disabled, as in the original:
    # df = df[list(df.columns[1:])].replace(".", np.nan).astype(float)
    name_list = {
        'CCEUSP02EZQ655N': "National Currency to Euro Spot Exchange Rate for the Euro Area",
        'CCUSMA02EZQ618N': "National Currency to US Dollar Exchange Rate: Average of Daily Rates for the Euro Area",
        'CCUSSP01EZQ650N': "US Dollar to National Currency Spot Exchange Rate for the Euro Area",
        'CCRETT02EZQ661N': "Real Effective Exchange Rates Based on Manufacturing Unit Labor Cost for the Euro Area",
        'CCRETT01EZQ661N': "Real Effective Exchange Rates Based on Manufacturing Consumer Price Index for the Euro Area"}
    description = "Currency Conversions, Quarterly, Not Seasonally Adjusted"
    return df, name_list, description
def Currency_Conversion_Monthly():
    """
    Download euro-area currency-conversion and effective-exchange-rate
    series, monthly, from the FRED graph CSV endpoint.

    Returns
    -------
    tuple
        (df, name_list, description): ``df`` has a parsed ``DATE`` column
        plus one column per FRED series id; ``name_list`` maps series ids
        to titles; ``description`` labels the dataset.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=1168&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=CCRETT01EZM661N,CCUSMA02EZM659N,CCUSSP01EZM650N,CCEUSP02EZM655N&scale=left,left,left,left&cosd=1970-01-01,1991-01-01,1999-01-01,1999-01-01&coed=2021-04-01,2021-04-01,2021-03-01,2021-03-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b&link_values=false,false,false,false&line_style=solid,solid,solid,solid&mark_type=none,none,none,none&mw=3,3,3,3&lw=2,2,2,2&ost=-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999&mma=0,0,0,0&fml=a,a,a,a&fq=Monthly,Monthly,Monthly,Monthly&fam=avg,avg,avg,avg&fgst=lin,lin,lin,lin&fgsnd=2020-02-01,2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3,4&transformation=lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1970-01-01,1991-01-01,1999-01-01,1999-01-01"
    agent = UserAgent(verify_ssl=False)
    response = requests.get(query_url, headers={"User-Agent": agent.random})
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    # Numeric coercion intentionally left disabled, as in the original:
    # df = df[list(df.columns[1:])].replace(".", np.nan).astype(float)
    name_list = {
        'CCRETT01EZM661N': "Real Effective Exchange Rates Based on Manufacturing Consumer Price Index for the Euro Area",
        'CCUSMA02EZM659N': "National Currency to US Dollar Exchange Rate: Average of Daily Rates for the Euro Area",
        'CCUSSP01EZM650N': "US Dollar to National Currency Spot Exchange Rate for the Euro Area",
        'CCEUSP02EZM655N': "National Currency to Euro Spot Exchange Rate for the Euro Area"}
    description = "Currency Conversions, Monthly, Not Seasonally Adjusted"
    return df, name_list, description
def Interest_Rates_Quarterly():
    """
    Download euro-area interest-rate series (10y government bond yield,
    3-month interbank, overnight), quarterly, from the FRED graph CSV
    endpoint.

    Returns
    -------
    tuple
        (df, name_list, description): ``df`` has a parsed ``DATE`` column
        plus one column per FRED series id; ``name_list`` maps series ids
        to titles; ``description`` labels the dataset.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=1168&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=IRLTLT01EZQ156N,IR3TIB01EZQ156N,IRSTCI01EZQ156N&scale=left,left,left&cosd=1970-01-01,1994-01-01,1994-01-01&coed=2021-01-01,2021-01-01,2021-01-01&line_color=%234572a7,%23aa4643,%2389a54e&link_values=false,false,false&line_style=solid,solid,solid&mark_type=none,none,none&mw=3,3,3&lw=2,2,2&ost=-99999,-99999,-99999&oet=99999,99999,99999&mma=0,0,0&fml=a,a,a&fq=Quarterly,Quarterly,Quarterly&fam=avg,avg,avg&fgst=lin,lin,lin&fgsnd=2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3&transformation=lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07&nd=1970-01-01,1994-01-01,1994-01-01"
    headers = {"User-Agent": UserAgent(verify_ssl=False).random}
    response = requests.get(query_url, headers=headers)
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    # Numeric coercion intentionally left disabled, as in the original:
    # df = df[list(df.columns[1:])].replace(".", np.nan).astype(float)
    name_list = {
        'IRLTLT01EZQ156N': "Long-Term Government Bond Yields: 10-year: Main (Including Benchmark) for the Euro Area",
        'IR3TIB01EZQ156N': "3-Month or 90-day Rates and Yields: Interbank Rates for the Euro Area",
        'IRSTCI01EZQ156N': "Immediate Rates: Less than 24 Hours: Call Money/Interbank Rate for the Euro Area"}
    description = "Interest Rates, Quarterly, Not Seasonally Adjusted"
    return df, name_list, description
def Interest_Rates_Monthly():
    """
    Download euro-area interest-rate series (10y government bond yield,
    3-month interbank, overnight), monthly, from the FRED graph CSV
    endpoint.

    Returns
    -------
    tuple
        (df, name_list, description): ``df`` has a parsed ``DATE`` column
        plus one column per FRED series id; ``name_list`` maps series ids
        to titles; ``description`` labels the dataset.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=1168&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=IRLTLT01EZM156N,IR3TIB01EZM156N,IRSTCI01EZM156N&scale=left,left,left&cosd=1970-01-01,1994-01-01,1994-01-01&coed=2021-04-01,2021-04-01,2021-04-01&line_color=%234572a7,%23aa4643,%2389a54e&link_values=false,false,false&line_style=solid,solid,solid&mark_type=none,none,none&mw=3,3,3&lw=2,2,2&ost=-99999,-99999,-99999&oet=99999,99999,99999&mma=0,0,0&fml=a,a,a&fq=Monthly,Monthly,Monthly&fam=avg,avg,avg&fgst=lin,lin,lin&fgsnd=2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3&transformation=lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07&nd=1970-01-01,1994-01-01,1994-01-01"
    agent = UserAgent(verify_ssl=False)
    response = requests.get(query_url, headers={"User-Agent": agent.random})
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    # Numeric coercion intentionally left disabled, as in the original:
    # df = df[list(df.columns[1:])].replace(".", np.nan).astype(float)
    name_list = {
        'IRLTLT01EZM156N': "Long-Term Government Bond Yields: 10-year: Main (Including Benchmark) for the Euro Area",
        'IR3TIB01EZM156N': "3-Month or 90-day Rates and Yields: Interbank Rates for the Euro Area",
        'IRSTCI01EZM156N': "Immediate Rates: Less than 24 Hours: Call Money/Interbank Rate for the Euro Area"}
    description = "Interest Rates, Monthly, Not Seasonally Adjusted"
    return df, name_list, description
def Share_Prices_Quarterly():
    """
    Download the euro-area total share-price index, quarterly, from the
    FRED graph CSV endpoint.

    Returns
    -------
    tuple
        (df, name_list, description): ``df`` has a parsed ``DATE`` column
        plus the series column; ``name_list`` maps the series id to its
        title; ``description`` labels the dataset.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=1168&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=SPASTT01EZQ661N&scale=left&cosd=1987-01-01&coed=2021-01-01&line_color=%234572a7&link_values=false&line_style=solid&mark_type=none&mw=3&lw=2&ost=-99999&oet=99999&mma=0&fml=a&fq=Quarterly&fam=avg&fgst=lin&fgsnd=2020-02-01&line_index=1&transformation=lin&vintage_date=2021-06-07&revision_date=2021-06-07&nd=1987-01-01"
    headers = {"User-Agent": UserAgent(verify_ssl=False).random}
    response = requests.get(query_url, headers=headers)
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    # Numeric coercion intentionally left disabled, as in the original:
    # df = df[list(df.columns[1:])].replace(".", np.nan).astype(float)
    name_list = {
        'SPASTT01EZQ661N': "Total Share Prices for All Shares for the Euro Area"}
    description = "Share Prices, Quarterly, Not Seasonally Adjusted"
    return df, name_list, description
def Share_Prices_Monthly():
    """
    Download the euro-area total share-price index, monthly, from the
    FRED graph CSV endpoint.

    Returns
    -------
    tuple
        (df, name_list, description): ``df`` has a parsed ``DATE`` column
        plus the series column; ``name_list`` maps the series id to its
        title; ``description`` labels the dataset.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=1168&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=SPASTT01EZM661N&scale=left&cosd=1986-12-01&coed=2021-04-01&line_color=%234572a7&link_values=false&line_style=solid&mark_type=none&mw=3&lw=2&ost=-99999&oet=99999&mma=0&fml=a&fq=Monthly&fam=avg&fgst=lin&fgsnd=2020-02-01&line_index=1&transformation=lin&vintage_date=2021-06-07&revision_date=2021-06-07&nd=1986-12-01"
    agent = UserAgent(verify_ssl=False)
    response = requests.get(query_url, headers={"User-Agent": agent.random})
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    # Numeric coercion intentionally left disabled, as in the original:
    # df = df[list(df.columns[1:])].replace(".", np.nan).astype(float)
    name_list = {
        'SPASTT01EZM661N': "Total Share Prices for All Shares for the Euro Area"}
    description = "Share Prices, Monthly, Not Seasonally Adjusted"
    return df, name_list, description
def CPI_Monthly(startdate="1970-01-01", enddate="2021-01-01"):
    """
    Download euro-area harmonised CPI series (total, COICOP breakdowns,
    energy), monthly, from the FRED graph CSV endpoint.

    Parameters
    ----------
    startdate, enddate : str
        Accepted for interface compatibility but currently UNUSED — the
        per-series date ranges are hard-coded in the query string below.
        NOTE(review): wiring these into cosd/coed would change the fetched
        window per series; left as-is to preserve behavior.

    Returns
    -------
    tuple
        (df, name_list, description): ``df`` has a parsed ``DATE`` column
        plus one column per FRED series id; ``name_list`` maps series ids
        to titles; ``description`` labels the dataset.
    """
    # Fixed: the query string was previously broken across two physical
    # lines (a raw newline inside the literal), which is a SyntaxError and
    # would also have corrupted the vintage_date parameter.
    tmp_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=CPHPTT01EZM661N,EA19CPHP0401IXOBM,EA19CPHP0403IXOBM,EA19CPHP0404IXOBM,EA19CPHP0405IXOBM,EA19CPHP0500IXOBM,EA19CPHP0600IXOBM,EA19CPHP0700IXOBM,EA19CPHP0702IXOBM,EA19CPHP0800IXOBM,EA19CPHP0900IXOBM,CPHPEN01EZM661N&scale=left,left,left,left,left,left,left,left,left,left,left,left&cosd=1990-01-01,1996-01-01,1996-01-01,1996-01-01,1996-01-01,1996-01-01,1996-01-01,1996-01-01,1996-01-01,1996-01-01,1996-01-01,1996-01-01&coed=2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae,%23db843d,%2392a8cd,%23a47d7c,%23b5ca92,%2391e8e1,%238d4653,%238085e8&link_values=false,false,false,false,false,false,false,false,false,false,false,false&line_style=solid,solid,solid,solid,solid,solid,solid,solid,solid,solid,solid,solid&mark_type=none,none,none,none,none,none,none,none,none,none,none,none&mw=3,3,3,3,3,3,3,3,3,3,3,3&lw=2,2,2,2,2,2,2,2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999,99999,99999,99999,99999,99999,99999,99999&mma=0,0,0,0,0,0,0,0,0,0,0,0&fml=a,a,a,a,a,a,a,a,a,a,a,a&fq=Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly&fam=avg,avg,avg,avg,avg,avg,avg,avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin,lin,lin,lin,lin,lin,lin,lin&fgsnd=2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3,4,5,6,7,8,9,10,11,12&transformation=lin,lin,lin,lin,lin,lin,lin,lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1990-01-01,1996-01-01,1996-01-01,1996-01-01,1996-01-01,1996-01-01,1996-01-01,1996-01-01,1996-01-01,1996-01-01,1996-01-01,1996-01-01"
    ua = UserAgent(verify_ssl=False)
    request_header = {"User-Agent": ua.random}
    r = requests.get(tmp_url, headers=request_header)
    data_text = r.content
    df = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    #df = df[list(df.columns[1:])].replace(".", np.nan).astype(float)
    name_list = {
        "CPHPTT01EZM661N": "CPI:Harmonized Prices: Total All Items for the Euro Area",
        "EA19CPHP0401IXOBM": "CPI:Harmonised_Price:Housing, water, electricity, gas and other fuels (COICOP 04): Actual rentals for housing for the Euro Area",
        "EA19CPHP0403IXOBM": "CPI:Harmonised_Price:Housing, water, electricity, gas and other fuels (COICOP 04): Maintenance & repairs of the dwellings for the Euro Area",
        "EA19CPHP0404IXOBM": "CPI:Harmonised_Price:Housing, water, electricity, gas and other fuels (COICOP 04): Water supply and miscellaneous services relating to the dwelling for the Euro Area",
        "EA19CPHP0405IXOBM": "CPI:Harmonised_Price:Housing, water, electricity, gas and other fuels (COICOP 04): Electricity, gas and other fuels for the Euro Area",
        "EA19CPHP0500IXOBM": "CPI:Harmonised_Price:Furnishings, household equip. and routine household maintenance (COICOP 05): Total for the Euro Area ",
        "EA19CPHP0600IXOBM": "CPI:Harmonised_Price:Health (COICOP 06): Total for the Euro Area",
        "EA19CPHP0700IXOBM": "CPI:Harmonised_Price:Transport (COICOP 07): Total for the Euro Area",
        "EA19CPHP0702IXOBM": "CPI:Harmonised_Price:Transport (COICOP 07): Fuels and lubricants for personal transport equipment for the Euro Area",
        "EA19CPHP0800IXOBM": "CPI:Harmonised_Price:Communication (COICOP 08): Total for the Euro Area",
        "EA19CPHP0900IXOBM": "CPI:Harmonised_Price:Recreation and culture (COICOP 09): Total for the Euro Area",
        "CPHPEN01EZM661N": "CPI:Harmonized Prices: Total Energy for the Euro Area"}
    description = "Consumer Price Index, Monthly, Not Seasonally Adjusted"
    return df, name_list, description
def CPI_Quarterly():
    """
    Download euro-area CPI series (all items, OECD groups, harmonised
    housing rentals), quarterly, from the FRED graph CSV endpoint.

    Returns
    -------
    tuple
        (df, name_list, description): ``df`` has a parsed ``DATE`` column
        plus one column per FRED series id; ``name_list`` maps series ids
        to titles; ``description`` labels the dataset.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=EA19CPALTT01GYQ,EA19CPGRLE01GYQ,EA19CPGREN01GYQ,EA19CPHP0401IXOBQ&scale=left,left,left,left&cosd=1991-01-01,1997-01-01,1997-01-01,1996-01-01&coed=2021-01-01,2021-01-01,2021-01-01,2021-01-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b&link_values=false,false,false,false&line_style=solid,solid,solid,solid&mark_type=none,none,none,none&mw=3,3,3,3&lw=2,2,2,2&ost=-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999&mma=0,0,0,0&fml=a,a,a,a&fq=Quarterly,Quarterly,Quarterly,Quarterly&fam=avg,avg,avg,avg&fgst=lin,lin,lin,lin&fgsnd=2020-02-01,2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3,4&transformation=lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1991-01-01,1997-01-01,1997-01-01,1996-01-01"
    headers = {"User-Agent": UserAgent(verify_ssl=False).random}
    response = requests.get(query_url, headers=headers)
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    # Numeric coercion intentionally left disabled, as in the original:
    # df = df[list(df.columns[1:])].replace(".", np.nan).astype(float)
    name_list = {
        'EA19CPALTT01GYQ': "CPI:All items:Total:Total for the Euro Area",
        'EA19CPGRLE01GYQ': "CPI:OECD Groups:All items non-food non-energy:Total for the Euro Area",
        'EA19CPGREN01GYQ': "CPI:OECD Groups:Energy (Fuel, electricity & gasoline):Total for the Euro Area",
        'EA19CPHP0401IXOBQ': "CPI:Harmonised prices:Housing, water, electricity, gas and other fuels (COICOP 04):Actual rentals for housing for the Euro Area"}
    description = "Consumer Price Index, Quarterly, Not Seasonally Adjusted"
    return df, name_list, description
def PPI_Monthly():
    """
    Download euro-area producer-price-index series (manufacturing,
    industrial activities, consumer/intermediate goods), monthly, from the
    FRED graph CSV endpoint.

    Returns
    -------
    tuple
        (df, name_list, description): ``df`` has a parsed ``DATE`` column
        plus one column per FRED series id; ``name_list`` maps series ids
        to titles; ``description`` labels the dataset.
    """
    # Fixed: the query string was previously broken across two physical
    # lines (a raw newline inside the literal), which is a SyntaxError and
    # would also have corrupted the vintage_date parameter.
    tmp_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=PIEAMP02EZM659N,PIEAMP01EZM661N,PIEATI01EZM661N,PIEATI02EZM661N,PITGND02EZM661N,PITGND01EZM661N,PITGIG01EZM661N,PITGIG02EZM661N,PIEAFD02EZM661N,PITGCG02EZM661N,PITGCG01EZM661N,PITGCD01EZM661N&scale=left,left,left,left,left,left,left,left,left,left,left,left&cosd=1996-01-01,2000-01-01,2000-01-01,2000-01-01,1995-01-01,2000-01-01,2000-01-01,1995-01-01,1995-01-01,1995-01-01,2000-01-01,2000-01-01&coed=2021-03-01,2021-02-01,2021-02-01,2021-03-01,2021-03-01,2021-02-01,2021-02-01,2021-03-01,2021-03-01,2021-03-01,2021-02-01,2021-02-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae,%23db843d,%2392a8cd,%23a47d7c,%23b5ca92,%2391e8e1,%238d4653,%238085e8&link_values=false,false,false,false,false,false,false,false,false,false,false,false&line_style=solid,solid,solid,solid,solid,solid,solid,solid,solid,solid,solid,solid&mark_type=none,none,none,none,none,none,none,none,none,none,none,none&mw=3,3,3,3,3,3,3,3,3,3,3,3&lw=2,2,2,2,2,2,2,2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999,99999,99999,99999,99999,99999,99999,99999&mma=0,0,0,0,0,0,0,0,0,0,0,0&fml=a,a,a,a,a,a,a,a,a,a,a,a&fq=Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly&fam=avg,avg,avg,avg,avg,avg,avg,avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin,lin,lin,lin,lin,lin,lin,lin&fgsnd=2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3,4,5,6,7,8,9,10,11,12&transformation=lin,lin,lin,lin,lin,lin,lin,lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1996-01-01,2000-01-01,2000-01-01,2000-01-01,1995-01-01,2000-01-01,2000-01-01,1995-01-01,1995-01-01,1995-01-01,2000-01-01,2000-01-01"
    ua = UserAgent(verify_ssl=False)
    request_header = {"User-Agent": ua.random}
    r = requests.get(tmp_url, headers=request_header)
    data_text = r.content
    df = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    #df = df[list(df.columns[1:])].replace(".", np.nan).astype(float)
    name_list = {
        'PIEAMP02EZM659N': "Producer Prices Index: Economic Activities: Domestic Manufacturing for the Euro Area",
        "PIEAMP01EZM661N": "Producer Prices Index: Economic Activities: Total Manufacturing for the Euro Area",
        "PIEATI01EZM661N": "Producer Prices Index: Economic Activities: Total Industrial Activities for the Euro Area",
        "PIEATI02EZM661N": "Producer Prices Index: Economic Activities: Domestic Industrial Activities for the Euro Area",
        "PITGND02EZM661N": "Producer Prices Index: Domestic Nondurable Consumer Goods for the Euro Area",
        "PITGND01EZM661N": "Producer Prices Index: Total Nondurable Consumer Goods for the Euro Area",
        "PITGIG01EZM661N": "Producer Prices Index: Total Intermediate Goods for the Euro Area",
        "PITGIG02EZM661N": "Producer Prices Index: Domestic Intermediate Goods for the Euro Area",
        "PIEAFD02EZM661N": "Producer Prices Index: Economic Activities: Domestic Manufacture of Food Products for the Euro Area",
        "PITGCG02EZM661N": "Producer Prices Index: Domestic Consumer Goods for the Euro Area",
        "PITGCG01EZM661N": "Producer Prices Index: Total Consumer Goods for the Euro Area",
        "PITGCD01EZM661N": "Producer Prices Index: Total Durable Consumer Goods for the Euro Area"}
    description = "Producer Prices Index, Monthly, Not Seasonally Adjusted"
    return df, name_list, description
def PPI_Quarterly():
    """
    Download euro-area producer-price-index series (food, energy,
    consumer/intermediate/investment goods), quarterly, from the FRED
    graph CSV endpoint.

    Returns
    -------
    tuple
        (df, name_list, description): ``df`` has a parsed ``DATE`` column
        plus one column per FRED series id; ``name_list`` maps series ids
        to titles; ``description`` labels the dataset.
    """
    # Fixed: the query string was previously broken across two physical
    # lines (a raw newline inside the literal), which is a SyntaxError and
    # would also have corrupted the vintage_date parameter.
    tmp_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=PIEAFD01EZQ661N,PIEAEN02EZQ661N,PIEAEN01EZQ661N,PITGND02EZQ661N,PITGND01EZQ661N,PITGIG01EZQ661N,PITGIG02EZQ661N,PIEAFD02EZQ661N,PITGCD02EZQ661N,PITGCD01EZQ661N,PITGVG01EZQ661N,PITGVG02EZQ661N&scale=left,left,left,left,left,left,left,left,left,left,left,left&cosd=2000-01-01,2000-01-01,2000-01-01,1995-01-01,2000-01-01,2000-01-01,1995-01-01,1995-01-01,2000-01-01,2000-01-01,2000-01-01,1995-01-01&coed=2020-10-01,2021-01-01,2020-10-01,2021-01-01,2020-10-01,2020-10-01,2021-01-01,2021-01-01,2021-01-01,2020-10-01,2020-10-01,2021-01-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae,%23db843d,%2392a8cd,%23a47d7c,%23b5ca92,%2391e8e1,%238d4653,%238085e8&link_values=false,false,false,false,false,false,false,false,false,false,false,false&line_style=solid,solid,solid,solid,solid,solid,solid,solid,solid,solid,solid,solid&mark_type=none,none,none,none,none,none,none,none,none,none,none,none&mw=3,3,3,3,3,3,3,3,3,3,3,3&lw=2,2,2,2,2,2,2,2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999,99999,99999,99999,99999,99999,99999,99999&mma=0,0,0,0,0,0,0,0,0,0,0,0&fml=a,a,a,a,a,a,a,a,a,a,a,a&fq=Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly&fam=avg,avg,avg,avg,avg,avg,avg,avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin,lin,lin,lin,lin,lin,lin,lin&fgsnd=2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3,4,5,6,7,8,9,10,11,12&transformation=lin,lin,lin,lin,lin,lin,lin,lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=2000-01-01,2000-01-01,2000-01-01,1995-01-01,2000-01-01,2000-01-01,1995-01-01,1995-01-01,2000-01-01,2000-01-01,2000-01-01,1995-01-01"
    ua = UserAgent(verify_ssl=False)
    request_header = {"User-Agent": ua.random}
    r = requests.get(tmp_url, headers=request_header)
    data_text = r.content
    df = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    #df = df[list(df.columns[1:])].replace(".", np.nan).astype(float)
    name_list = {
        'PIEAFD01EZQ661N': "Producer Prices Index: Economic Activities: Total Manufacture of Food Products for the Euro Area",
        "PIEAEN02EZQ661N": "Producer Prices Index: Economic Activities: Domestic Energy for the Euro Area",
        "PIEAEN01EZQ661N": "Producer Prices Index: Economic Activities: Total Energy for the Euro Area",
        "PITGND02EZQ661N": "Producer Prices Index: Domestic Nondurable Consumer Goods for the Euro Area",
        "PITGND01EZQ661N": "Producer Prices Index: Total Nondurable Consumer Goods for the Euro Area",
        "PITGIG01EZQ661N": "Producer Prices Index: Total Intermediate Goods for the Euro Area",
        "PITGIG02EZQ661N": "Producer Prices Index: Domestic Intermediate Goods for the Euro Area",
        "PIEAFD02EZQ661N": "Producer Prices Index: Economic Activities: Domestic Manufacture of Food Products for the Euro Area",
        "PITGCD02EZQ661N": "Producer Prices Index: Domestic Durable Consumer Goods for the Euro Area",
        "PITGCD01EZQ661N": "Producer Prices Index: Total Durable Consumer Goods for the Euro Area",
        "PITGVG01EZQ661N": "Producer Prices Index: Investments Goods: Total for the Euro Area",
        "PITGVG02EZQ661N": "Producer Prices Index: Domestic Investments Goods for the Euro Area"}
    description = "Producer Prices Index, Quarterly, Not Seasonally Adjusted"
    return df, name_list, description
def Business_Tendency_Surveys_Construction():
    """Download Euro Area business tendency surveys (construction) from FRED.

    Returns:
        tuple: ``(df, name_list, description)`` — ``df`` is the raw FRED CSV
        with the ``DATE`` column parsed to datetime, ``name_list`` maps each
        FRED series id to a human-readable title, and ``description`` is a
        short dataset label.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=EA19BCBUTE02STSAM,BCOBLV02EZM460S,BCEMFT02EZM460S,BCCICP02EZM460S,BCSPFT02EZM460S&scale=left,left,left,left,left&cosd=1985-01-01,1985-01-01,1985-01-01,1985-01-01,1985-01-01&coed=2021-04-01,2021-04-01,2021-04-01,2021-04-01,2021-04-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae&link_values=false,false,false,false,false&line_style=solid,solid,solid,solid,solid&mark_type=none,none,none,none,none&mw=3,3,3,3,3&lw=2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999&mma=0,0,0,0,0&fml=a,a,a,a,a&fq=Monthly,Monthly,Monthly,Monthly,Monthly&fam=avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin&fgsnd=2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3,4,5&transformation=lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1985-01-01,1985-01-01,1985-01-01,1985-01-01,1985-01-01"
    # Random browser-like User-Agent so FRED serves the CSV normally.
    headers = {"User-Agent": UserAgent(verify_ssl=False).random}
    response = requests.get(query_url, headers=headers)
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    name_list = {
        "EA19BCBUTE02STSAM": "Business tendency surveys (construction): Business situation - Activity: Tendency: National indicator for the Euro Area",
        "BCOBLV02EZM460S": "Business Tendency Surveys for Construction: Order Books: Level: European Commission Indicator for the Euro Area",
        "BCEMFT02EZM460S": "Business Tendency Surveys for Construction: Employment: Future Tendency: European Commission and National Indicators for the Euro Area",
        "BCCICP02EZM460S": "Business Tendency Surveys for Construction: Confidence Indicators: Composite Indicators: European Commission and National Indicators for the Euro Area",
        "BCSPFT02EZM460S": "Business Tendency Surveys for Construction: Selling Prices: Future Tendency: European Commission Indicator for the Euro Area"}
    description = "Business tendency surveys (construction), Monthly, Seasonally Adjusted"
    return df, name_list, description
def Business_Tendency_Surveys_Services():
    """Download Euro Area business tendency surveys (services) from FRED.

    Returns:
        tuple: ``(df, name_list, description)`` — ``df`` is the raw FRED CSV
        with the ``DATE`` column parsed to datetime, ``name_list`` maps each
        FRED series id to a human-readable title, and ``description`` is a
        short dataset label.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=EA19BVBUTE02STSAM,BVCICP02EZM460S,BVEMTE02EZM460S,BVEMFT02EZM460S,BVDEFT02EZM460S,BVDETE02EZM460S&scale=left,left,left,left,left,left&cosd=1995-04-01,1995-04-01,1995-04-01,1996-10-01,1995-04-01,1995-04-01&coed=2021-04-01,2021-04-01,2021-04-01,2021-04-01,2021-04-01,2021-04-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae,%23db843d&link_values=false,false,false,false,false,false&line_style=solid,solid,solid,solid,solid,solid&mark_type=none,none,none,none,none,none&mw=3,3,3,3,3,3&lw=2,2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999,99999&mma=0,0,0,0,0,0&fml=a,a,a,a,a,a&fq=Monthly,Monthly,Monthly,Monthly,Monthly,Monthly&fam=avg,avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin,lin&fgsnd=2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3,4,5,6&transformation=lin,lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1995-04-01,1995-04-01,1995-04-01,1996-10-01,1995-04-01,1995-04-01"
    # Random browser-like User-Agent so FRED serves the CSV normally.
    headers = {"User-Agent": UserAgent(verify_ssl=False).random}
    response = requests.get(query_url, headers=headers)
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    name_list = {
        "EA19BVBUTE02STSAM": "Business tendency surveys (services): Business situation - Activity: Tendency: National indicator for Euro Area",
        "BVCICP02EZM460S": "Business Tendency Surveys for Services: Confidence Indicators: Composite Indicators: European Commission and National Indicators for the Euro Area",
        "BVEMTE02EZM460S": "Business Tendency Surveys for Services: Employment: Tendency: European Commission Indicator for the Euro Area",
        "BVEMFT02EZM460S": "Business Tendency Surveys for Services: Employment: Future Tendency: European Commission and National Indicators for the Euro Area",
        "BVDEFT02EZM460S": "Business Tendency Surveys for Services: Demand Evolution: Future Tendency: European Commission Indicator for the Euro Area",
        "BVDETE02EZM460S": "Business Tendency Surveys for Services: Demand Evolution: Tendency: European Commission Indicator for the Euro Area"}
    description = "Business tendency surveys (services), Monthly, Seasonally Adjusted"
    return df, name_list, description
def Business_Tendency_Surveys_Manufacturing_Quarterly():
    """Download Euro Area business tendency surveys (manufacturing, quarterly) from FRED.

    Returns:
        tuple: ``(df, name_list, description)`` — ``df`` is the raw FRED CSV
        with the ``DATE`` column parsed to datetime, ``name_list`` maps each
        FRED series id to a human-readable title, and ``description`` is a
        short dataset label.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=BSCURT02EZQ160S,BSOITE02EZQ460S&scale=left,left&cosd=1985-01-01,1985-01-01&coed=2021-04-01,2021-04-01&line_color=%234572a7,%23aa4643&link_values=false,false&line_style=solid,solid&mark_type=none,none&mw=3,3&lw=2,2&ost=-99999,-99999&oet=99999,99999&mma=0,0&fml=a,a&fq=Quarterly,Quarterly&fam=avg,avg&fgst=lin,lin&fgsnd=2020-02-01,2020-02-01&line_index=1,2&transformation=lin,lin&vintage_date=2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07&nd=1985-01-01,1985-01-01"
    # Random browser-like User-Agent so FRED serves the CSV normally.
    headers = {"User-Agent": UserAgent(verify_ssl=False).random}
    response = requests.get(query_url, headers=headers)
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    name_list = {
        "BSCURT02EZQ160S": "Business Tendency Surveys for Manufacturing: Capacity Utilization: Rate of Capacity Utilization: European Commission and National Indicators for the Euro Area",
        "BSOITE02EZQ460S": "Business Tendency Surveys for Manufacturing: Orders Inflow: Tendency: European Commission Indicator for the Euro Area"}
    description = "Business tendency surveys (manufacturing), Quarterly, Seasonally Adjusted"
    return df, name_list, description
def Business_Tendency_Surveys_Manufacturing_Monthly():
    """Download Euro Area business tendency surveys (manufacturing, monthly) from FRED.

    Returns:
        tuple: ``(df, name_list, description)`` — ``df`` is the raw FRED CSV
        with the ``DATE`` column parsed to datetime, ``name_list`` maps each
        FRED series id to a human-readable title, and ``description`` is a
        short dataset label.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=BSSPFT02EZM460S,BSOBLV02EZM460S,BSEMFT02EZM460S,BSFGLV02EZM460S,BSXRLV02EZM086S,BSCICP02EZM460S,BSPRTE02EZM460S,BSPRFT02EZM460S&scale=left,left,left,left,left,left,left,left&cosd=1985-01-01,1985-01-01,1985-01-01,1985-01-01,1985-01-01,1985-01-01,1985-01-01,1985-01-01&coed=2021-04-01,2021-04-01,2021-04-01,2021-04-01,2021-04-01,2021-04-01,2021-04-01,2021-04-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae,%23db843d,%2392a8cd,%23a47d7c&link_values=false,false,false,false,false,false,false,false&line_style=solid,solid,solid,solid,solid,solid,solid,solid&mark_type=none,none,none,none,none,none,none,none&mw=3,3,3,3,3,3,3,3&lw=2,2,2,2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999,99999,99999,99999&mma=0,0,0,0,0,0,0,0&fml=a,a,a,a,a,a,a,a&fq=Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly&fam=avg,avg,avg,avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin,lin,lin,lin&fgsnd=2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3,4,5,6,7,8&transformation=lin,lin,lin,lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1985-01-01,1985-01-01,1985-01-01,1985-01-01,1985-01-01,1985-01-01,1985-01-01,1985-01-01"
    # Random browser-like User-Agent so FRED serves the CSV normally.
    headers = {"User-Agent": UserAgent(verify_ssl=False).random}
    response = requests.get(query_url, headers=headers)
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    name_list = {
        "BSSPFT02EZM460S": "Business Tendency Surveys for Manufacturing: Selling Prices: Future Tendency: European Commission Indicator for the Euro Area",
        "BSOBLV02EZM460S": "Business Tendency Surveys for Manufacturing: Order Books: Level: European Commission and National Indicators for the Euro Area",
        "BSEMFT02EZM460S": "Business Tendency Surveys for Manufacturing: Employment: Future Tendency: European Commission and National Indicators for the Euro Area",
        "BSFGLV02EZM460S": "Business Tendency Surveys for Manufacturing: Finished Goods Stocks: Level: European Commission and National Indicators for the Euro Area",
        "BSXRLV02EZM086S": "Business Tendency Surveys for Manufacturing: Export Order Books or Demand: Level: European Commission Indicator for the Euro Area",
        "BSCICP02EZM460S": "Business Tendency Surveys for Manufacturing: Confidence Indicators: Composite Indicators: European Commission and National Indicators for the Euro Area",
        "BSPRTE02EZM460S": "Business Tendency Surveys for Manufacturing: Production: Tendency: European Commission and National Indicators for the Euro Area",
        "BSPRFT02EZM460S": "Business Tendency Surveys for Manufacturing: Production: Future Tendency: European Commission and National Indicators for the Euro Area"}
    description = "Business tendency surveys (manufacturing), Monthly, Seasonally Adjusted"
    return df, name_list, description
def Business_Tendency_Surveys_Retail_Trade():
    """Download Euro Area business tendency surveys (retail trade) from FRED.

    Returns:
        tuple: ``(df, name_list, description)`` — ``df`` is the raw FRED CSV
        with the ``DATE`` column parsed to datetime, ``name_list`` maps each
        FRED series id to a human-readable title, and ``description`` is a
        short dataset label.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=EA19BREMFT02STSAM,EA19BRODFT02STSAM,EA19BRVSLV02STSAM,EA19BRCICP02STSAM,EA19BRBUFT02STSAM,EA19BRBUTE02STSAM&scale=left,left,left,left,left,left&cosd=1985-04-01,1985-02-01,1985-01-01,1985-01-01,1985-01-01,1985-01-01&coed=2021-04-01,2021-04-01,2021-04-01,2021-04-01,2021-04-01,2021-04-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae,%23db843d&link_values=false,false,false,false,false,false&line_style=solid,solid,solid,solid,solid,solid&mark_type=none,none,none,none,none,none&mw=3,3,3,3,3,3&lw=2,2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999,99999&mma=0,0,0,0,0,0&fml=a,a,a,a,a,a&fq=Monthly,Monthly,Monthly,Monthly,Monthly,Monthly&fam=avg,avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin,lin&fgsnd=2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3,4,5,6&transformation=lin,lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1985-04-01,1985-02-01,1985-01-01,1985-01-01,1985-01-01,1985-01-01"
    # Random browser-like User-Agent so FRED serves the CSV normally.
    headers = {"User-Agent": UserAgent(verify_ssl=False).random}
    response = requests.get(query_url, headers=headers)
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    name_list = {
        "EA19BREMFT02STSAM": "Business tendency surveys (retail trade): Employment: Future tendency: National indicator for the Euro Area",
        "EA19BRODFT02STSAM": "Business tendency surveys (retail trade): Order intentions or Demand: Future tendency: National indicator for the Euro Area",
        "EA19BRVSLV02STSAM": "Business tendency surveys (retail trade): Volume of stocks: Level: National indicator for the Euro Area",
        "EA19BRCICP02STSAM": "Business tendency surveys (retail trade): Confidence indicators: Composite indicators: National indicator for the Euro Area",
        "EA19BRBUFT02STSAM": "Business tendency surveys (retail trade): Business situation - Activity: Future tendency: National indicator for Euro Area",
        "EA19BRBUTE02STSAM": "Business tendency surveys (retail trade): Business situation - Activity: Tendency: National indicator for Euro Area"}
    description = "Business tendency surveys (retail trade), Monthly, Seasonally Adjusted"
    return df, name_list, description
def Labor_Compensation_Quarterly_Adj():
    """Download Euro Area labor compensation (quarterly, seasonally adjusted) from FRED.

    Returns:
        tuple: ``(df, name_list, description)`` — ``df`` is the raw FRED CSV
        with the ``DATE`` column parsed to datetime, ``name_list`` maps each
        FRED series id to a human-readable title, and ``description`` is a
        short dataset label.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=LCEAMN01EZQ661S,LCEAPR01EZQ661S&scale=left,left&cosd=1971-01-01,1996-01-01&coed=2020-10-01,2020-10-01&line_color=%234572a7,%23aa4643&link_values=false,false&line_style=solid,solid&mark_type=none,none&mw=3,3&lw=2,2&ost=-99999,-99999&oet=99999,99999&mma=0,0&fml=a,a&fq=Quarterly,Quarterly&fam=avg,avg&fgst=lin,lin&fgsnd=2020-02-01,2020-02-01&line_index=1,2&transformation=lin,lin&vintage_date=2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07&nd=1971-01-01,1996-01-01"
    # Random browser-like User-Agent so FRED serves the CSV normally.
    headers = {"User-Agent": UserAgent(verify_ssl=False).random}
    response = requests.get(query_url, headers=headers)
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    name_list = {
        "LCEAMN01EZQ661S": "Hourly Earnings: Manufacturing for the Euro Area",
        "LCEAPR01EZQ661S": "Hourly Earnings: Private Sector for the Euro Area",
    }
    description = "Labor Compensation, Quarterly, Seasonally Adjusted"
    return df, name_list, description
def Labor_Compensation_Quarterly_NAdj():
    """Download Euro Area labor compensation (quarterly, NOT seasonally adjusted) from FRED.

    Returns:
        tuple: ``(df, name_list, description)`` — ``df`` is the raw FRED CSV
        with the ``DATE`` column parsed to datetime, ``name_list`` maps each
        FRED series id to a human-readable title, and ``description`` is a
        short dataset label.
    """
    # FIX: the query previously requested the seasonally adjusted series
    # (LCEAMN01EZQ661S / LCEAPR01EZQ661S — identical to the *_Adj function)
    # while name_list labels the not-seasonally-adjusted ...661N series.
    # The id= parameter now requests the ...661N series to match.
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=LCEAMN01EZQ661N,LCEAPR01EZQ661N&scale=left,left&cosd=1971-01-01,1996-01-01&coed=2020-10-01,2020-10-01&line_color=%234572a7,%23aa4643&link_values=false,false&line_style=solid,solid&mark_type=none,none&mw=3,3&lw=2,2&ost=-99999,-99999&oet=99999,99999&mma=0,0&fml=a,a&fq=Quarterly,Quarterly&fam=avg,avg&fgst=lin,lin&fgsnd=2020-02-01,2020-02-01&line_index=1,2&transformation=lin,lin&vintage_date=2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07&nd=1971-01-01,1996-01-01"
    # Random browser-like User-Agent so FRED serves the CSV normally.
    headers = {"User-Agent": UserAgent(verify_ssl=False).random}
    response = requests.get(query_url, headers=headers)
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    name_list = {
        "LCEAMN01EZQ661N": "Hourly Earnings: Manufacturing for the Euro Area",
        "LCEAPR01EZQ661N": "Hourly Earnings: Private Sector for the Euro Area",
    }
    description = "Labor Compensation, Quarterly, Not Seasonally Adjusted"
    return df, name_list, description
def Unit_Labor_costs():
    """Download Euro Area early estimates of quarterly unit labor costs from FRED.

    Returns:
        tuple: ``(df, name_list, description)`` — ``df`` is the raw FRED CSV
        with the ``DATE`` column parsed to datetime, ``name_list`` maps each
        FRED series id to a human-readable title, and ``description`` is a
        short dataset label.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=ULQECU01EZQ661S,ULQEUL01EZQ659S,ULQELP01EZQ661S&scale=left,left,left&cosd=1995-01-01,1996-01-01,1995-01-01&coed=2020-10-01,2020-10-01,2020-10-01&line_color=%234572a7,%23aa4643,%2389a54e&link_values=false,false,false&line_style=solid,solid,solid&mark_type=none,none,none&mw=3,3,3&lw=2,2,2&ost=-99999,-99999,-99999&oet=99999,99999,99999&mma=0,0,0&fml=a,a,a&fq=Quarterly,Quarterly,Quarterly&fam=avg,avg,avg&fgst=lin,lin,lin&fgsnd=2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3&transformation=lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07&nd=1995-01-01,1996-01-01,1995-01-01"
    # Random browser-like User-Agent so FRED serves the CSV normally.
    headers = {"User-Agent": UserAgent(verify_ssl=False).random}
    response = requests.get(query_url, headers=headers)
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    name_list = {
        "ULQECU01EZQ661S": "Early Estimate of Quarterly ULC Indicators: Total Labor Compensation per Unit of Labor Input for the Euro Area",
        "ULQEUL01EZQ659S": "Early Estimate of Quarterly ULC Indicators: Total for the Euro Area",
        "ULQELP01EZQ661S": "Early Estimate of Quarterly ULC Indicators: Total Labor Productivity for the Euro Area"}
    description = "Unit Labor Costs, Quarterly, Seasonally Adjusted"
    return df, name_list, description
def Labor_Force_Survey_Rates_Quarterly_NAdj():
    """Download Euro Area harmonized unemployment rates (quarterly, NSA) from FRED.

    Returns:
        tuple: ``(df, name_list, description)`` — ``df`` is the raw FRED CSV
        with the ``DATE`` column parsed to datetime, ``name_list`` maps each
        FRED series id to a human-readable title, and ``description`` is a
        short dataset label.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=LRHU24TTEZQ156N,LRHU24FEEZQ156N,LRHU24MAEZQ156N,LRHUADMAEZQ156N,LRHUADTTEZQ156N,LRHUADFEEZQ156N,LRHUTTFEEZQ156N,LRHUTTTTEZQ156N,LRHUTTMAEZQ156N&scale=left,left,left,left,left,left,left,left,left&cosd=1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1993-01-01,1993-01-01,1993-01-01&coed=2021-01-01,2021-01-01,2021-01-01,2021-01-01,2021-01-01,2021-01-01,2021-01-01,2021-01-01,2021-01-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae,%23db843d,%2392a8cd,%23a47d7c,%23b5ca92&link_values=false,false,false,false,false,false,false,false,false&line_style=solid,solid,solid,solid,solid,solid,solid,solid,solid&mark_type=none,none,none,none,none,none,none,none,none&mw=3,3,3,3,3,3,3,3,3&lw=2,2,2,2,2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999,99999,99999,99999,99999&mma=0,0,0,0,0,0,0,0,0&fml=a,a,a,a,a,a,a,a,a&fq=Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly&fam=avg,avg,avg,avg,avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin,lin,lin,lin,lin&fgsnd=2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3,4,5,6,7,8,9&transformation=lin,lin,lin,lin,lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1993-01-01,1993-01-01,1993-01-01"
    # Random browser-like User-Agent so FRED serves the CSV normally.
    headers = {"User-Agent": UserAgent(verify_ssl=False).random}
    response = requests.get(query_url, headers=headers)
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    name_list = {
        "LRHU24TTEZQ156N": "Harmonized Unemployment: Aged 15-24: All Persons for the Euro Area",
        "LRHU24FEEZQ156N": "Harmonized Unemployment: Aged 15-24: Females for the Euro Area",
        "LRHU24MAEZQ156N": "Harmonized Unemployment: Aged 15-24: Males for the Euro Area",
        "LRHUADMAEZQ156N": "Harmonized Unemployment: Aged 25 and Over: Males for the Euro Area",
        "LRHUADTTEZQ156N": "Harmonized Unemployment: Aged 25 and Over: All Persons for the Euro Area",
        "LRHUADFEEZQ156N": "Harmonized Unemployment: Aged 25 and Over: Females for the Euro Area",
        "LRHUTTFEEZQ156N": "Harmonized Unemployment: Total: Females for the Euro Area",
        "LRHUTTTTEZQ156N": "Harmonized Unemployment Rate: Total: All Persons for the Euro Area",
        "LRHUTTMAEZQ156N": "Harmonized Unemployment: Total: Males for the Euro Area"}
    description = "Labor Force Survey - quarterly rates, Quarterly, Not Seasonally Adjusted"
    return df, name_list, description
def Labor_Force_Survey_Rates_Quarterly_Adj():
    """Download Euro Area harmonized unemployment rates (quarterly, SA) from FRED.

    Returns:
        tuple: ``(df, name_list, description)`` — ``df`` is the raw FRED CSV
        with the ``DATE`` column parsed to datetime, ``name_list`` maps each
        FRED series id to a human-readable title, and ``description`` is a
        short dataset label.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=LRHU24MAEZQ156S,LRHU24TTEZQ156S,LRHU24FEEZQ156S,LRHUADFEEZQ156S,LRHUADMAEZQ156S,LRHUADTTEZQ156S,LRHUTTTTEZQ156S,LRHUTTMAEZQ156S,LRHUTTFEEZQ156S&scale=left,left,left,left,left,left,left,left,left&cosd=1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1990-07-01,1990-07-01,1990-07-01&coed=2021-01-01,2021-01-01,2021-01-01,2021-01-01,2021-01-01,2021-01-01,2021-01-01,2021-01-01,2021-01-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae,%23db843d,%2392a8cd,%23a47d7c,%23b5ca92&link_values=false,false,false,false,false,false,false,false,false&line_style=solid,solid,solid,solid,solid,solid,solid,solid,solid&mark_type=none,none,none,none,none,none,none,none,none&mw=3,3,3,3,3,3,3,3,3&lw=2,2,2,2,2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999,99999,99999,99999,99999&mma=0,0,0,0,0,0,0,0,0&fml=a,a,a,a,a,a,a,a,a&fq=Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly&fam=avg,avg,avg,avg,avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin,lin,lin,lin,lin&fgsnd=2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3,4,5,6,7,8,9&transformation=lin,lin,lin,lin,lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1990-07-01,1990-07-01,1990-07-01"
    # Random browser-like User-Agent so FRED serves the CSV normally.
    headers = {"User-Agent": UserAgent(verify_ssl=False).random}
    response = requests.get(query_url, headers=headers)
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    name_list = {
        "LRHU24MAEZQ156S": "Harmonized Unemployment: Aged 15-24: Males for the Euro Area",
        "LRHU24TTEZQ156S": "Harmonized Unemployment: Aged 15-24: All Persons for the Euro Area",
        "LRHU24FEEZQ156S": "Harmonized Unemployment: Aged 15-24: Females for the Euro Area",
        "LRHUADFEEZQ156S": "Harmonized Unemployment: Aged 25 and Over: Females for the Euro Area",
        "LRHUADMAEZQ156S": "Harmonized Unemployment: Aged 25 and Over: Males for the Euro Area",
        "LRHUADTTEZQ156S": "Harmonized Unemployment: Aged 25 and Over: All Persons for the Euro Area",
        "LRHUTTTTEZQ156S": "Harmonized Unemployment Rate: Total: All Persons for the Euro Area",
        "LRHUTTMAEZQ156S": "Harmonized Unemployment: Total: Males for the Euro Area",
        "LRHUTTFEEZQ156S": "Harmonized Unemployment: Total: Females for the Euro Area"}
    description = "Labor Force Survey - quarterly rates, Quarterly, Seasonally Adjusted"
    return df, name_list, description
def Labor_Force_Survey_Rates_Monthly_NAdj():
    """Download Euro Area harmonized unemployment rates (monthly, NOT seasonally adjusted) from FRED.

    Returns:
        tuple: ``(df, name_list, description)`` — ``df`` is the raw FRED CSV
        with the ``DATE`` column parsed to datetime, ``name_list`` maps each
        FRED series id to a human-readable title, and ``description`` is a
        short dataset label.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=LRHUTTFEEZM156N,LRHUTTMAEZM156N,LRHUTTTTEZM156N,LRHUADTTEZM156N,LRHUADMAEZM156N,LRHUADFEEZM156N,LRHU24FEEZM156N,LRHU24MAEZM156N,LRHU24TTEZM156N&scale=left,left,left,left,left,left,left,left,left&cosd=1993-01-01,1993-01-01,1993-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01&coed=2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae,%23db843d,%2392a8cd,%23a47d7c,%23b5ca92&link_values=false,false,false,false,false,false,false,false,false&line_style=solid,solid,solid,solid,solid,solid,solid,solid,solid&mark_type=none,none,none,none,none,none,none,none,none&mw=3,3,3,3,3,3,3,3,3&lw=2,2,2,2,2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999,99999,99999,99999,99999&mma=0,0,0,0,0,0,0,0,0&fml=a,a,a,a,a,a,a,a,a&fq=Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly&fam=avg,avg,avg,avg,avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin,lin,lin,lin,lin&fgsnd=2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3,4,5,6,7,8,9&transformation=lin,lin,lin,lin,lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1993-01-01,1993-01-01,1993-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01"
    # Random browser-like User-Agent so FRED serves the CSV normally.
    headers = {"User-Agent": UserAgent(verify_ssl=False).random}
    response = requests.get(query_url, headers=headers)
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    name_list = {
        "LRHUTTFEEZM156N": "Harmonized Unemployment: Total: Females for the Euro Area",
        "LRHUTTMAEZM156N": "Harmonized Unemployment: Total: Males for the Euro Area",
        "LRHUTTTTEZM156N": "Harmonized Unemployment Rate: Total: All Persons for the Euro Area",
        "LRHUADTTEZM156N": "Harmonized Unemployment: Aged 25 and Over: All Persons for the Euro Area",
        "LRHUADMAEZM156N": "Harmonized Unemployment: Aged 25 and Over: Males for the Euro Area",
        "LRHUADFEEZM156N": "Harmonized Unemployment: Aged 25 and Over: Females for the Euro Area",
        "LRHU24FEEZM156N": "Harmonized Unemployment: Aged 15-24: Females for the Euro Area",
        "LRHU24MAEZM156N": "Harmonized Unemployment: Aged 15-24: Males for the Euro Area",
        "LRHU24TTEZM156N": "Harmonized Unemployment: Aged 15-24: All Persons for the Euro Area"}
    # FIX: description previously said "quarterly rates, Monthly, Seasonally
    # Adjusted" — the series here are monthly and not seasonally adjusted
    # (...M156N), matching the function name.
    description = "Labor Force Survey - monthly rates, Monthly, Not Seasonally Adjusted"
    return df, name_list, description
def Labor_Force_Survey_Level_Quarterly_NAdj():
    """Download Euro Area harmonized unemployment levels (quarterly, NOT seasonally adjusted) from FRED.

    Returns:
        tuple: ``(df, name_list, description)`` — ``df`` is the raw FRED CSV
        with the ``DATE`` column parsed to datetime, ``name_list`` maps each
        FRED series id to a human-readable title, and ``description`` is a
        short dataset label.
    """
    # FIX: the query was a copy-paste of the monthly *rates* request
    # (LRHU...M156N, fq=Monthly), while name_list labels the quarterly
    # *level* series LFHU...Q647N. The id= list and fq= now request the
    # quarterly level series in name_list order.
    # NOTE(review): cosd/coed/nd date ranges are inherited from the pasted
    # query; FRED clips to each series' availability — verify against the
    # series pages if exact start dates matter.
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=LFHU24FEEZQ647N,LFHU24TTEZQ647N,LFHU24MAEZQ647N,LFHUADTTEZQ647N,LFHUADMAEZQ647N,LFHUADFEEZQ647N,LFHUTTMAEZQ647N,LFHUTTFEEZQ647N,LFHUTTTTEZQ647N&scale=left,left,left,left,left,left,left,left,left&cosd=1993-01-01,1993-01-01,1993-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01&coed=2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae,%23db843d,%2392a8cd,%23a47d7c,%23b5ca92&link_values=false,false,false,false,false,false,false,false,false&line_style=solid,solid,solid,solid,solid,solid,solid,solid,solid&mark_type=none,none,none,none,none,none,none,none,none&mw=3,3,3,3,3,3,3,3,3&lw=2,2,2,2,2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999,99999,99999,99999,99999&mma=0,0,0,0,0,0,0,0,0&fml=a,a,a,a,a,a,a,a,a&fq=Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly&fam=avg,avg,avg,avg,avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin,lin,lin,lin,lin&fgsnd=2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3,4,5,6,7,8,9&transformation=lin,lin,lin,lin,lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1993-01-01,1993-01-01,1993-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01"
    # Random browser-like User-Agent so FRED serves the CSV normally.
    headers = {"User-Agent": UserAgent(verify_ssl=False).random}
    response = requests.get(query_url, headers=headers)
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    name_list = {
        "LFHU24FEEZQ647N": "Harmonized Unemployment: Aged 15-24: Females for the Euro Area",
        "LFHU24TTEZQ647N": "Harmonized Unemployment: Aged 15-24: All Persons for the Euro Area",
        "LFHU24MAEZQ647N": "Harmonized Unemployment: Aged 15-24: Males for the Euro Area",
        "LFHUADTTEZQ647N": "Harmonized Unemployment: Aged 25 and Over: All Persons for the Euro Area",
        "LFHUADMAEZQ647N": "Harmonized Unemployment: Aged 25 and Over: Males for the Euro Area",
        "LFHUADFEEZQ647N": "Harmonized Unemployment: Aged 25 and Over: Females for the Euro Area",
        "LFHUTTMAEZQ647N": "Total Harmonized Unemployment: Males for the Euro Area",
        "LFHUTTFEEZQ647N": "Total Harmonized Unemployment: Females for the Euro Area",
        "LFHUTTTTEZQ647N": "Total Harmonized Unemployment: All Persons for the Euro Area"}
    description = "Labor Force Survey - quarterly levels, Quarterly, Not Seasonally Adjusted"
    return df, name_list, description
def Labor_Force_Survey_Level_Quarterly_Adj():
    """Download Euro Area harmonized unemployment levels (quarterly, seasonally adjusted) from FRED.

    Returns:
        tuple: ``(df, name_list, description)`` — ``df`` is the raw FRED CSV
        with the ``DATE`` column parsed to datetime, ``name_list`` maps each
        FRED series id to a human-readable title, and ``description`` is a
        short dataset label.
    """
    # FIX: the query was a copy-paste of the monthly *rates* request
    # (LRHU...M156N, fq=Monthly), while name_list labels the quarterly
    # *level* series LFHU...Q647S. The id= list and fq= now request the
    # quarterly level series in name_list order.
    # NOTE(review): cosd/coed/nd date ranges are inherited from the pasted
    # query; FRED clips to each series' availability — verify against the
    # series pages if exact start dates matter.
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=LFHU24TTEZQ647S,LFHU24MAEZQ647S,LFHU24FEEZQ647S,LFHUTTFEEZQ647S,LFHUTTTTEZQ647S,LFHUTTMAEZQ647S,LFHUADMAEZQ647S,LFHUADFEEZQ647S,LFHUADTTEZQ647S&scale=left,left,left,left,left,left,left,left,left&cosd=1993-01-01,1993-01-01,1993-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01&coed=2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae,%23db843d,%2392a8cd,%23a47d7c,%23b5ca92&link_values=false,false,false,false,false,false,false,false,false&line_style=solid,solid,solid,solid,solid,solid,solid,solid,solid&mark_type=none,none,none,none,none,none,none,none,none&mw=3,3,3,3,3,3,3,3,3&lw=2,2,2,2,2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999,99999,99999,99999,99999&mma=0,0,0,0,0,0,0,0,0&fml=a,a,a,a,a,a,a,a,a&fq=Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly&fam=avg,avg,avg,avg,avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin,lin,lin,lin,lin&fgsnd=2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3,4,5,6,7,8,9&transformation=lin,lin,lin,lin,lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1993-01-01,1993-01-01,1993-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01"
    # Random browser-like User-Agent so FRED serves the CSV normally.
    headers = {"User-Agent": UserAgent(verify_ssl=False).random}
    response = requests.get(query_url, headers=headers)
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    name_list = {
        "LFHU24TTEZQ647S": "Harmonized Unemployment: Aged 15-24: All Persons for the Euro Area",
        "LFHU24MAEZQ647S": "Harmonized Unemployment: Aged 15-24: Males for the Euro Area",
        "LFHU24FEEZQ647S": "Harmonized Unemployment: Aged 15-24: Females for the Euro Area",
        "LFHUTTFEEZQ647S": "Total Harmonized Unemployment: Females for the Euro Area",
        "LFHUTTTTEZQ647S": "Total Harmonized Unemployment: All Persons for the Euro Area",
        "LFHUTTMAEZQ647S": "Total Harmonized Unemployment: Males for the Euro Area",
        "LFHUADMAEZQ647S": "Harmonized Unemployment: Aged 25 and Over: Males for the Euro Area",
        "LFHUADFEEZQ647S": "Harmonized Unemployment: Aged 25 and Over: Females for the Euro Area",
        "LFHUADTTEZQ647S": "Harmonized Unemployment: Aged 25 and Over: All Persons for the Euro Area"}
    description = "Labor Force Survey - quarterly levels, Quarterly, Seasonally Adjusted"
    return df, name_list, description
def Labor_Force_Survey_Level_Monthly_Adj():
    """Fetch euro-area harmonized unemployment levels (monthly, seasonally adjusted) from FRED.

    Returns:
        tuple: ``(df, name_list, description)`` — the raw FRED CSV as a
        DataFrame with the ``DATE`` column parsed to datetime, a mapping
        from FRED series ids to human-readable titles, and a short
        dataset label.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=LFHU24FEEZM647S,LFHU24TTEZM647S,LFHU24MAEZM647S,LFHUADFEEZM647S,LFHUADTTEZM647S,LFHUADMAEZM647S,LFHUTTTTEZM647S,LFHUTTMAEZM647S,LFHUTTFEEZM647S&scale=left,left,left,left,left,left,left,left,left&cosd=1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01&coed=2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae,%23db843d,%2392a8cd,%23a47d7c,%23b5ca92&link_values=false,false,false,false,false,false,false,false,false&line_style=solid,solid,solid,solid,solid,solid,solid,solid,solid&mark_type=none,none,none,none,none,none,none,none,none&mw=3,3,3,3,3,3,3,3,3&lw=2,2,2,2,2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999,99999,99999,99999,99999&mma=0,0,0,0,0,0,0,0,0&fml=a,a,a,a,a,a,a,a,a&fq=Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly&fam=avg,avg,avg,avg,avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin,lin,lin,lin,lin&fgsnd=2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3,4,5,6,7,8,9&transformation=lin,lin,lin,lin,lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01"
    # Randomize the User-Agent so repeated scrapes are less likely to be blocked.
    agent = UserAgent(verify_ssl=False)
    response = requests.get(query_url, headers={"User-Agent": agent.random})
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    name_list = {
        'LFHU24FEEZM647S': "Harmonized Unemployment: Aged 15-24: Females for the Euro Area",
        'LFHU24TTEZM647S': "Harmonized Unemployment: Aged 15-24: All Persons for the Euro Area",
        'LFHU24MAEZM647S': "Harmonized Unemployment: Aged 15-24: Males for the Euro Area",
        'LFHUADFEEZM647S': "Harmonized Unemployment: Aged 25 and Over: Females for the Euro Area",
        'LFHUADTTEZM647S': "Harmonized Unemployment: Aged 25 and Over: All Persons for the Euro Area",
        'LFHUADMAEZM647S': "Harmonized Unemployment: Aged 25 and Over: Males for the Euro Area",
        'LFHUTTTTEZM647S': "Total Harmonized Unemployment: All Persons for the Euro Area",
        'LFHUTTMAEZM647S': "Total Harmonized Unemployment: Males for the Euro Area",
        'LFHUTTFEEZM647S': "Total Harmonized Unemployment: Females for the Euro Area"}
    # NOTE(review): label says "quarterly levels" although the series are
    # monthly — presumably the upstream release name; confirm before changing.
    description = "Labor Force Survey - quarterly levels, Monthly, Seasonally Adjusted"
    return df, name_list, description
def Labor_Force_Survey_Level_Monthly_NAdj():
    """Fetch euro-area harmonized unemployment levels (monthly, not seasonally adjusted) from FRED.

    Returns:
        tuple: ``(df, name_list, description)`` — the raw FRED CSV as a
        DataFrame with the ``DATE`` column parsed to datetime, a mapping
        from FRED series ids to human-readable titles, and a short
        dataset label.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=LFHU24MAEZM647N,LFHU24FEEZM647N,LFHU24TTEZM647N,LFHUADMAEZM647N,LFHUADFEEZM647N,LFHUADTTEZM647N,LFHUTTFEEZM647N,LFHUTTTTEZM647N,LFHUTTMAEZM647N&scale=left,left,left,left,left,left,left,left,left&cosd=1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01&coed=2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01,2021-03-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae,%23db843d,%2392a8cd,%23a47d7c,%23b5ca92&link_values=false,false,false,false,false,false,false,false,false&line_style=solid,solid,solid,solid,solid,solid,solid,solid,solid&mark_type=none,none,none,none,none,none,none,none,none&mw=3,3,3,3,3,3,3,3,3&lw=2,2,2,2,2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999,99999,99999,99999,99999&mma=0,0,0,0,0,0,0,0,0&fml=a,a,a,a,a,a,a,a,a&fq=Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly&fam=avg,avg,avg,avg,avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin,lin,lin,lin,lin&fgsnd=2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3,4,5,6,7,8,9&transformation=lin,lin,lin,lin,lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01,1995-01-01"
    # Randomize the User-Agent so repeated scrapes are less likely to be blocked.
    agent = UserAgent(verify_ssl=False)
    response = requests.get(query_url, headers={"User-Agent": agent.random})
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    name_list = {
        'LFHU24MAEZM647N': "Harmonized Unemployment: Aged 15-24: Males for the Euro Area",
        'LFHU24FEEZM647N': "Harmonized Unemployment: Aged 15-24: Females for the Euro Area",
        'LFHU24TTEZM647N': "Harmonized Unemployment: Aged 15-24: All Persons for the Euro Area",
        'LFHUADMAEZM647N': "Harmonized Unemployment: Aged 25 and Over: Males for the Euro Area",
        'LFHUADFEEZM647N': "Harmonized Unemployment: Aged 25 and Over: Females for the Euro Area",
        'LFHUADTTEZM647N': "Harmonized Unemployment: Aged 25 and Over: All Persons for the Euro Area",
        'LFHUTTFEEZM647N': "Total Harmonized Unemployment: Females for the Euro Area",
        'LFHUTTTTEZM647N': "Total Harmonized Unemployment: All Persons for the Euro Area",
        'LFHUTTMAEZM647N': "Total Harmonized Unemployment: Males for the Euro Area"}
    # NOTE(review): label says "quarterly levels" although the series are
    # monthly — presumably the upstream release name; confirm before changing.
    description = "Labor Force Survey - quarterly levels, Monthly, Not Seasonally Adjusted"
    return df, name_list, description
def Production_Monthly_Adj():
    """Fetch euro-area production indices (monthly, seasonally adjusted) from FRED.

    Returns:
        tuple: ``(df, name_list, description)`` — the raw FRED CSV as a
        DataFrame with the ``DATE`` column parsed to datetime, a mapping
        from FRED series ids to human-readable titles, and a short
        dataset label.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=EA19PRINTO01GYSAM,EA19PRMNCG03IXOBSAM,EA19PRMNCG02IXOBSAM,EA19PRMNVG01IXOBSAM,EA19PRMNTO01IXOBSAM,EA19PRMNIG01IXOBSAM,EA19PRCNTO01IXOBSAM&scale=left,left,left,left,left,left,left&cosd=1976-07-01,1985-01-01,1990-01-01,1985-01-01,1980-01-01,1985-01-01,1985-01-01&coed=2021-02-01,2017-12-01,2018-12-01,2018-12-01,2021-02-01,2018-12-01,2021-02-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae,%23db843d,%2392a8cd&link_values=false,false,false,false,false,false,false&line_style=solid,solid,solid,solid,solid,solid,solid&mark_type=none,none,none,none,none,none,none&mw=3,3,3,3,3,3,3&lw=2,2,2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999,99999,99999&mma=0,0,0,0,0,0,0&fml=a,a,a,a,a,a,a&fq=Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly&fam=avg,avg,avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin,lin,lin&fgsnd=2020-02-01,2017-12-01,2018-12-01,2018-12-01,2020-02-01,2018-12-01,2020-02-01&line_index=1,2,3,4,5,6,7&transformation=lin,lin,lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1976-07-01,1985-01-01,1990-01-01,1985-01-01,1980-01-01,1985-01-01,1985-01-01"
    # Randomize the User-Agent so repeated scrapes are less likely to be blocked.
    agent = UserAgent(verify_ssl=False)
    response = requests.get(query_url, headers={"User-Agent": agent.random})
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    name_list = {
        'EA19PRINTO01GYSAM': "Production: Industry: Total industry: Total industry excluding construction for the Euro Area",
        'EA19PRMNCG03IXOBSAM': "Production: Manufacturing: Consumer goods: Non durable goods for the Euro Area",
        'EA19PRMNCG02IXOBSAM': "Production: Manufacturing: Consumer goods: Durable goods for the Euro Area",
        'EA19PRMNVG01IXOBSAM': "Production: Manufacturing: Investment goods: Total for the Euro Area",
        'EA19PRMNTO01IXOBSAM': "Production: Manufacturing: Total manufacturing: Total manufacturing for the Euro Area",
        'EA19PRMNIG01IXOBSAM': "Production: Manufacturing: Intermediate goods: Total for the Euro Area",
        'EA19PRCNTO01IXOBSAM': "Production: Construction: Total construction: Total for the Euro Area"}
    description = "Production, Monthly, Seasonally Adjusted"
    return df, name_list, description
def Production_Quarterly_Adj():
    """Fetch euro-area production indices (quarterly, seasonally adjusted) from FRED.

    Returns:
        tuple: ``(df, name_list, description)`` — the raw FRED CSV as a
        DataFrame with the ``DATE`` column parsed to datetime, a mapping
        from FRED series ids to human-readable titles, and a short
        dataset label.
    """
    tmp_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=PRINTO01EZQ659S,PRMNVG01EZQ661S,PRMNCG02EZQ661S,PRMNCG03EZQ661S,PRMNTO01EZQ661S,PRMNIG01EZQ661S,PRCNTO01EZQ661S&scale=left,left,left,left,left,left,left&cosd=1976-07-01,1985-01-01,1990-01-01,1985-01-01,1980-01-01,1985-01-01,1985-01-01&coed=2020-10-01,2018-10-01,2018-10-01,2017-10-01,2020-10-01,2018-10-01,2020-10-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae,%23db843d,%2392a8cd&link_values=false,false,false,false,false,false,false&line_style=solid,solid,solid,solid,solid,solid,solid&mark_type=none,none,none,none,none,none,none&mw=3,3,3,3,3,3,3&lw=2,2,2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999,99999,99999&mma=0,0,0,0,0,0,0&fml=a,a,a,a,a,a,a&fq=Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly&fam=avg,avg,avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin,lin,lin&fgsnd=2020-02-01,2018-10-01,2018-10-01,2017-10-01,2020-02-01,2018-10-01,2020-02-01&line_index=1,2,3,4,5,6,7&transformation=lin,lin,lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1976-07-01,1985-01-01,1990-01-01,1985-01-01,1980-01-01,1985-01-01,1985-01-01"
    # Randomize the User-Agent so repeated scrapes are less likely to be blocked.
    ua = UserAgent(verify_ssl=False)
    request_header = {"User-Agent": ua.random}
    r = requests.get(tmp_url, headers=request_header)
    df = pd.read_csv(io.StringIO(r.content.decode('utf-8')))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    name_list = {
        'PRINTO01EZQ659S': "Total Industry Production Excluding Construction for the Euro Area",
        'PRMNVG01EZQ661S': "Total Production of Investment Goods for Manufacturing for the Euro Area",
        'PRMNCG02EZQ661S': "Production of Durable Consumer Goods for Manufacturing for the Euro Area",
        'PRMNCG03EZQ661S': "Production of Nondurable Consumer Goods for Manufacturing for the Euro Area",
        'PRMNTO01EZQ661S': "Total Manufacturing Production for the Euro Area",
        'PRMNIG01EZQ661S': "Total Production of Intermediate Goods for Manufacturing for the Euro Area",
        'PRCNTO01EZQ661S': "Total Construction for the Euro Area"}
    # Fixed label: previously said "Monthly, Not Seasonally Adjusted", but the
    # request is fq=Quarterly and all series ids end in "S" (seasonally adjusted).
    description = "Production, Quarterly, Seasonally Adjusted"
    return df, name_list, description
def Production_Monthly_NAdj():
    """Fetch euro-area production indices (monthly, not seasonally adjusted) from FRED.

    Returns:
        tuple: ``(df, name_list, description)`` — the raw FRED CSV as a
        DataFrame with the ``DATE`` column parsed to datetime, a mapping
        from FRED series ids to human-readable titles, and a short
        dataset label.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=EA19PRMNIG01IXOBM,EA19PRMNTO01IXOBM,EA19PRMNCG02IXOBM,EA19PRMNCG03IXOBM,EA19PRMNVG01IXOBM,EA19PRCNTO01IXOBM,EA19PRINTO01IXOBM&scale=left,left,left,left,left,left,left&cosd=1985-01-01,1980-01-01,1990-01-01,1985-01-01,1985-01-01,1985-01-01,1980-01-01&coed=2018-12-01,2021-02-01,2018-12-01,2018-12-01,2018-12-01,2021-02-01,2021-02-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae,%23db843d,%2392a8cd&link_values=false,false,false,false,false,false,false&line_style=solid,solid,solid,solid,solid,solid,solid&mark_type=none,none,none,none,none,none,none&mw=3,3,3,3,3,3,3&lw=2,2,2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999,99999,99999&mma=0,0,0,0,0,0,0&fml=a,a,a,a,a,a,a&fq=Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly&fam=avg,avg,avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin,lin,lin&fgsnd=2018-12-01,2020-02-01,2018-12-01,2018-12-01,2018-12-01,2020-02-01,2020-02-01&line_index=1,2,3,4,5,6,7&transformation=lin,lin,lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1985-01-01,1980-01-01,1990-01-01,1985-01-01,1985-01-01,1985-01-01,1980-01-01"
    # Randomize the User-Agent so repeated scrapes are less likely to be blocked.
    agent = UserAgent(verify_ssl=False)
    response = requests.get(query_url, headers={"User-Agent": agent.random})
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    name_list = {
        'EA19PRMNIG01IXOBM': "Production: Manufacturing: Intermediate goods: Total for the Euro Area",
        'EA19PRMNTO01IXOBM': "Production: Manufacturing: Total manufacturing: Total manufacturing for the Euro Area",
        'EA19PRMNCG02IXOBM': "Production: Manufacturing: Consumer goods: Durable goods for the Euro Area",
        'EA19PRMNCG03IXOBM': "Production: Manufacturing: Consumer goods: Non durable goods for the Euro Area",
        'EA19PRMNVG01IXOBM': "Production: Manufacturing: Investment goods: Total for the Euro Area",
        'EA19PRCNTO01IXOBM': "Production: Construction: Total construction: Total for the Euro Area",
        'EA19PRINTO01IXOBM': "Production: Industry: Total industry: Total industry excluding construction for the Euro Area"}
    description = "Production, Monthly, Not Seasonally Adjusted"
    return df, name_list, description
def Production_Quarterly_NAdj():
    """Fetch euro-area production indices (quarterly, not seasonally adjusted) from FRED.

    Returns:
        tuple: ``(df, name_list, description)`` — the raw FRED CSV as a
        DataFrame with the ``DATE`` column parsed to datetime, a mapping
        from FRED series ids to human-readable titles, and a short
        dataset label.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=PRMNCG03EZQ661N,PRMNCG02EZQ661N,PRMNVG01EZQ661N,PRMNIG01EZQ661N,PRMNTO01EZQ661N,PRINTO01EZQ661N,PRCNTO01EZQ661N&scale=left,left,left,left,left,left,left&cosd=1985-01-01,1990-01-01,1985-01-01,1985-01-01,1980-01-01,1980-01-01,1985-01-01&coed=2018-10-01,2018-10-01,2018-10-01,2018-10-01,2020-10-01,2020-10-01,2020-10-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae,%23db843d,%2392a8cd&link_values=false,false,false,false,false,false,false&line_style=solid,solid,solid,solid,solid,solid,solid&mark_type=none,none,none,none,none,none,none&mw=3,3,3,3,3,3,3&lw=2,2,2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999,99999,99999&mma=0,0,0,0,0,0,0&fml=a,a,a,a,a,a,a&fq=Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly&fam=avg,avg,avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin,lin,lin&fgsnd=2018-10-01,2018-10-01,2018-10-01,2018-10-01,2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3,4,5,6,7&transformation=lin,lin,lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1985-01-01,1990-01-01,1985-01-01,1985-01-01,1980-01-01,1980-01-01,1985-01-01"
    # Randomize the User-Agent so repeated scrapes are less likely to be blocked.
    agent = UserAgent(verify_ssl=False)
    response = requests.get(query_url, headers={"User-Agent": agent.random})
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    name_list = {
        'PRMNCG03EZQ661N': "Production of Nondurable Consumer Goods for Manufacturing for the Euro Area",
        'PRMNCG02EZQ661N': "Production of Durable Consumer Goods for Manufacturing for the Euro Area",
        'PRMNVG01EZQ661N': "Total Production of Investment Goods for Manufacturing for the Euro Area",
        'PRMNIG01EZQ661N': "Total Production of Intermediate Goods for Manufacturing for the Euro Area",
        'PRMNTO01EZQ661N': "Total Manufacturing Production for the Euro Area",
        'PRINTO01EZQ661N': "Total Industry Production Excluding Construction for the Euro Area",
        'PRCNTO01EZQ661N': "Total Construction for the Euro Area"}
    description = "Production, Quarterly, Not Seasonally Adjusted"
    return df, name_list, description
def Sales_Monthly_Adj():
    """Fetch euro-area sales indices (monthly, seasonally adjusted) from FRED.

    Returns:
        tuple: ``(df, name_list, description)`` — the raw FRED CSV as a
        DataFrame with the ``DATE`` column parsed to datetime, a mapping
        from FRED series ids to human-readable titles, and a short
        dataset label.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=EA19SLMNTO02IXOBSAM,EA19SLMNIG02IXOBSAM,EA19SLMNCD02IXOBSAM,EA19SLMNCN02IXOBSAM,EA19SLMNVG02IXOBSAM,EA19SLRTTO01IXOBSAM,EA19SLRTTO02IXOBSAM,EA19SLRTCR03IXOBSAM&scale=left,left,left,left,left,left,left,left&cosd=1980-01-01,1990-01-01,1993-01-01,1995-01-01,1980-01-01,1995-01-01,1995-01-01,1970-01-01&coed=2021-02-01,2018-12-01,2018-12-01,2018-12-01,2018-12-01,2021-02-01,2021-02-01,2018-12-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae,%23db843d,%2392a8cd,%23a47d7c&link_values=false,false,false,false,false,false,false,false&line_style=solid,solid,solid,solid,solid,solid,solid,solid&mark_type=none,none,none,none,none,none,none,none&mw=3,3,3,3,3,3,3,3&lw=2,2,2,2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999,99999,99999,99999&mma=0,0,0,0,0,0,0,0&fml=a,a,a,a,a,a,a,a&fq=Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly&fam=avg,avg,avg,avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin,lin,lin,lin&fgsnd=2020-02-01,2018-12-01,2018-12-01,2018-12-01,2018-12-01,2020-02-01,2020-02-01,2018-12-01&line_index=1,2,3,4,5,6,7,8&transformation=lin,lin,lin,lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1980-01-01,1990-01-01,1993-01-01,1995-01-01,1980-01-01,1995-01-01,1995-01-01,1970-01-01"
    # Randomize the User-Agent so repeated scrapes are less likely to be blocked.
    agent = UserAgent(verify_ssl=False)
    response = requests.get(query_url, headers={"User-Agent": agent.random})
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    name_list = {
        'EA19SLMNTO02IXOBSAM': "Sales: Manufacturing: Total manufacturing: Value for the Euro Area",
        'EA19SLMNIG02IXOBSAM': "Sales: Manufacturing: Intermediate goods: Value for the Euro Area",
        'EA19SLMNCD02IXOBSAM': "Sales: Manufacturing: Consumer goods durable: Value for the Euro Area",
        'EA19SLMNCN02IXOBSAM': "Sales: Manufacturing: Consumer goods non durable: Value for the Euro Area",
        'EA19SLMNVG02IXOBSAM': "Sales: Manufacturing: Investment goods: Value for the Euro Area",
        'EA19SLRTTO01IXOBSAM': "Sales: Retail trade: Total retail trade: Volume for the Euro Area",
        'EA19SLRTTO02IXOBSAM': "Sales: Retail trade: Total retail trade: Value for the Euro Area",
        'EA19SLRTCR03IXOBSAM': "Sales: Retail trade: Car registration: Passenger cars for the Euro Area"}
    description = "Sales, Monthly, Seasonally Adjusted"
    return df, name_list, description
def Sales_Quarterly_Adj():
    """Fetch euro-area sales indices (quarterly, seasonally adjusted) from FRED.

    Returns:
        tuple: ``(df, name_list, description)`` — the raw FRED CSV as a
        DataFrame with the ``DATE`` column parsed to datetime, a mapping
        from FRED series ids to human-readable titles, and a short
        dataset label.
    """
    tmp_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=SLMNTO02EZQ661S,SLMNVG02EZQ661S,SLMNCD02EZQ661S,SLMNCN02EZQ661S,SLMNIG02EZQ661S,SLRTTO02EZQ661S,SLRTTO01EZQ659S,SLRTCR03EZQ661S&scale=left,left,left,left,left,left,left,left&cosd=1980-01-01,1980-01-01,1993-01-01,1995-01-01,1990-01-01,1995-01-01,1996-01-01,1970-01-01&coed=2020-10-01,2018-10-01,2018-10-01,2018-10-01,2018-10-01,2020-10-01,2020-10-01,2018-10-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae,%23db843d,%2392a8cd,%23a47d7c&link_values=false,false,false,false,false,false,false,false&line_style=solid,solid,solid,solid,solid,solid,solid,solid&mark_type=none,none,none,none,none,none,none,none&mw=3,3,3,3,3,3,3,3&lw=2,2,2,2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999,99999,99999,99999&mma=0,0,0,0,0,0,0,0&fml=a,a,a,a,a,a,a,a&fq=Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly&fam=avg,avg,avg,avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin,lin,lin,lin&fgsnd=2020-02-01,2018-10-01,2018-10-01,2018-10-01,2018-10-01,2020-02-01,2020-02-01,2018-10-01&line_index=1,2,3,4,5,6,7,8&transformation=lin,lin,lin,lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1980-01-01,1980-01-01,1993-01-01,1995-01-01,1990-01-01,1995-01-01,1996-01-01,1970-01-01"
    # Randomize the User-Agent so repeated scrapes are less likely to be blocked.
    ua = UserAgent(verify_ssl=False)
    request_header = {"User-Agent": ua.random}
    r = requests.get(tmp_url, headers=request_header)
    df = pd.read_csv(io.StringIO(r.content.decode('utf-8')))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    name_list = {
        'SLMNTO02EZQ661S': "Sales Value of Total Manufactured Goods for the Euro Area",
        'SLMNVG02EZQ661S': "Sales Value of Manufactured Investment Goods for the Euro Area",
        'SLMNCD02EZQ661S': "Sales Value of Manufactured Durable Consumer Goods for the Euro Area",
        'SLMNCN02EZQ661S': "Sales Value of Manufactured Nondurable Consumer Goods for the Euro Area",
        'SLMNIG02EZQ661S': "Sales Value of Manufactured Intermediate Goods for the Euro Area",
        # Fixed typo: was "Euro Areaa"; now matches the same label in
        # Sales_Quarterly_NAdj.
        'SLRTTO02EZQ661S': "Value of Total Retail Trade sales for the Euro Area",
        'SLRTTO01EZQ659S': "Volume of Total Retail Trade sales for the Euro Area",
        'SLRTCR03EZQ661S': "Retail Trade Sales: Passenger Car Registrations for the Euro Area"}
    description = "Sales, Quarterly, Seasonally Adjusted"
    return df, name_list, description
def Sales_Monthly_NAdj():
    """Fetch euro-area sales indices (monthly, not seasonally adjusted) from FRED.

    Returns:
        tuple: ``(df, name_list, description)`` — the raw FRED CSV as a
        DataFrame with the ``DATE`` column parsed to datetime, a mapping
        from FRED series ids to human-readable titles, and a short
        dataset label.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=EA19SLMNIG02IXOBM,EA19SLRTTO02IXOBM,EA19SLMNCD02IXOBM,EA19SLMNCN02IXOBM,EA19SLMNTO02IXOBM,EA19SLRTCR03IXOBM,EA19SLRTTO01IXOBM&scale=left,left,left,left,left,left,left&cosd=1990-01-01,1995-01-01,1993-01-01,1995-01-01,1980-01-01,1985-01-01,1995-01-01&coed=2018-12-01,2021-02-01,2018-12-01,2018-12-01,2021-02-01,2021-03-01,2021-02-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae,%23db843d,%2392a8cd&link_values=false,false,false,false,false,false,false&line_style=solid,solid,solid,solid,solid,solid,solid&mark_type=none,none,none,none,none,none,none&mw=3,3,3,3,3,3,3&lw=2,2,2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999,99999,99999&mma=0,0,0,0,0,0,0&fml=a,a,a,a,a,a,a&fq=Monthly,Monthly,Monthly,Monthly,Monthly,Monthly,Monthly&fam=avg,avg,avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin,lin,lin&fgsnd=2018-12-01,2020-02-01,2018-12-01,2018-12-01,2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3,4,5,6,7&transformation=lin,lin,lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1990-01-01,1995-01-01,1993-01-01,1995-01-01,1980-01-01,1985-01-01,1995-01-01"
    # Randomize the User-Agent so repeated scrapes are less likely to be blocked.
    agent = UserAgent(verify_ssl=False)
    response = requests.get(query_url, headers={"User-Agent": agent.random})
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    name_list = {
        'EA19SLMNIG02IXOBM': "Sales: Manufacturing: Intermediate goods: Value for the Euro Area",
        'EA19SLRTTO02IXOBM': "Sales: Retail trade: Total retail trade: Value for the Euro Area",
        'EA19SLMNCD02IXOBM': "Sales: Manufacturing: Consumer goods durable: Value for the Euro Area",
        'EA19SLMNCN02IXOBM': "Sales: Manufacturing: Consumer goods non durable: Value for the Euro Area",
        'EA19SLMNTO02IXOBM': "Sales: Manufacturing: Total manufacturing: Value for the Euro Area",
        'EA19SLRTCR03IXOBM': "Sales: Retail trade: Car registration: Passenger cars for the Euro Area",
        'EA19SLRTTO01IXOBM': "Sales: Retail trade: Total retail trade: Volume for the Euro Area"}
    description = "Sales, Monthly, Not Seasonally Adjusted"
    return df, name_list, description
def Sales_Quarterly_NAdj():
    """Fetch euro-area sales indices (quarterly, not seasonally adjusted) from FRED.

    Returns:
        tuple: ``(df, name_list, description)`` — the raw FRED CSV as a
        DataFrame with the ``DATE`` column parsed to datetime, a mapping
        from FRED series ids to human-readable titles, and a short
        dataset label.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=SLMNIG02EZQ661N,SLMNTO02EZQ661N,SLMNCD02EZQ661N,SLMNCN02EZQ661N,SLRTTO01EZQ661N,SLRTTO02EZQ661N,SLRTCR03EZQ661N&scale=left,left,left,left,left,left,left&cosd=1990-01-01,1980-01-01,1993-01-01,1995-01-01,1995-01-01,1995-01-01,1985-01-01&coed=2018-10-01,2020-10-01,2018-10-01,2018-10-01,2020-10-01,2020-10-01,2021-01-01&line_color=%234572a7,%23aa4643,%2389a54e,%2380699b,%233d96ae,%23db843d,%2392a8cd&link_values=false,false,false,false,false,false,false&line_style=solid,solid,solid,solid,solid,solid,solid&mark_type=none,none,none,none,none,none,none&mw=3,3,3,3,3,3,3&lw=2,2,2,2,2,2,2&ost=-99999,-99999,-99999,-99999,-99999,-99999,-99999&oet=99999,99999,99999,99999,99999,99999,99999&mma=0,0,0,0,0,0,0&fml=a,a,a,a,a,a,a&fq=Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly,Quarterly&fam=avg,avg,avg,avg,avg,avg,avg&fgst=lin,lin,lin,lin,lin,lin,lin&fgsnd=2018-10-01,2020-02-01,2018-10-01,2018-10-01,2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3,4,5,6,7&transformation=lin,lin,lin,lin,lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07,2021-06-07&nd=1990-01-01,1980-01-01,1993-01-01,1995-01-01,1995-01-01,1995-01-01,1985-01-01"
    # Randomize the User-Agent so repeated scrapes are less likely to be blocked.
    agent = UserAgent(verify_ssl=False)
    response = requests.get(query_url, headers={"User-Agent": agent.random})
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    name_list = {
        'SLMNIG02EZQ661N': "Sales Value of Manufactured Intermediate Goods for the Euro Area",
        'SLMNTO02EZQ661N': "Sales Value of Total Manufactured Goods for the Euro Area",
        'SLMNCD02EZQ661N': "Sales Value of Manufactured Durable Consumer Goods for the Euro Area",
        'SLMNCN02EZQ661N': "Sales Value of Manufactured Nondurable Consumer Goods for the Euro Area",
        'SLRTTO01EZQ661N': "Volume of Total Retail Trade sales for the Euro Area",
        'SLRTTO02EZQ661N': "Value of Total Retail Trade sales for the Euro Area",
        'SLRTCR03EZQ661N': "Retail Trade Sales: Passenger Car Registrations for the Euro Area"}
    description = "Sales, Quarterly, Not Seasonally Adjusted"
    return df, name_list, description
def Consumer_Opinion_Survey():
    """Fetch euro-area consumer opinion survey indicators (monthly, seasonally adjusted) from FRED.

    Returns:
        tuple: ``(df, name_list, description)`` — the raw FRED CSV as a
        DataFrame with the ``DATE`` column parsed to datetime, a mapping
        from FRED series ids to human-readable titles, and a short
        dataset label.
    """
    query_url = url["fred_econ"] + "bgcolor=%23e1e9f0&chart_type=line&drp=0&fo=open%20sans&graph_bgcolor=%23ffffff&height=450&mode=fred&recession_bars=off&txtcolor=%23444444&ts=12&tts=12&width=748&nt=0&thu=0&trc=0&show_legend=yes&show_axis_titles=yes&show_tooltip=yes&id=CSCICP02EZM460S,CSESFT02EZM460S,CSINFT02EZM460S&scale=left,left,left&cosd=1973-01-01,1985-01-01,1985-01-01&coed=2021-04-01,2021-04-01,2021-04-01&line_color=%234572a7,%23aa4643,%2389a54e&link_values=false,false,false&line_style=solid,solid,solid&mark_type=none,none,none&mw=3,3,3&lw=2,2,2&ost=-99999,-99999,-99999&oet=99999,99999,99999&mma=0,0,0&fml=a,a,a&fq=Monthly,Monthly,Monthly&fam=avg,avg,avg&fgst=lin,lin,lin&fgsnd=2020-02-01,2020-02-01,2020-02-01&line_index=1,2,3&transformation=lin,lin,lin&vintage_date=2021-06-07,2021-06-07,2021-06-07&revision_date=2021-06-07,2021-06-07,2021-06-07&nd=1973-01-01,1985-01-01,1985-01-01"
    # Randomize the User-Agent so repeated scrapes are less likely to be blocked.
    agent = UserAgent(verify_ssl=False)
    response = requests.get(query_url, headers={"User-Agent": agent.random})
    df = pd.read_csv(io.StringIO(response.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    name_list = {
        'CSCICP02EZM460S': "Consumer Opinion Surveys: Confidence Indicators: Composite Indicators: European Commission and National Indicators for the Euro Area",
        'CSESFT02EZM460S': "Consumer Opinion Surveys: Economic Situation: Future Tendency: European Commission Indicator for the Euro Area",
        'CSINFT02EZM460S': "Consumer Opinion Surveys: Consumer Prices: Future Tendency of Inflation: European Commission and National Indicators for the Euro Area"}
    description = "Consumer opinion surveys, Monthly, Seasonally Adjusted"
    return df, name_list, description
def EU_EPU_Monthly():
    """Download the European Economic Policy Uncertainty (EPU) monthly indices.

    Returns:
        pandas.DataFrame with a ``Date`` column (parsed from Year/Month)
        and the European, German, Italian, UK and French news indices.
    """
    # The last spreadsheet row is a notes/footer row — drop it.
    raw = pd.read_excel("https://www.policyuncertainty.com/media/Europe_Policy_Uncertainty_Data.xlsx")[:-1]
    # Build "YYYY-M" strings from the Year and Month columns, then parse.
    year_part = raw['Year'].apply(str)
    month_part = raw['Month'].apply(int).apply(str)
    raw['Date'] = pd.to_datetime(year_part.str.cat(month_part, sep='-'), format='%Y-%m')
    return raw[["Date", "European_News_Index", "Germany_News_Index", "Italy_News_Index", "UK_News_Index", "France_News_Index"]]
class ecb_data(object):
    """Minimal client for the ECB Statistical Data Warehouse REST API."""

    def __init__(self, url=url["ecb"]):
        # Base endpoint; the default is read from the module-level ``url``
        # mapping at class-definition time.
        self.url = url

    def codebook(self):
        """Return a pointer to the official ECB series-key codebook."""
        return "please follow the ECB's codebook: https://sdw.ecb.europa.eu/browse.do?node=9691101"

    def get_data(self,
                 datacode="ICP",
                 key="M.U2.N.000000.4.ANR",
                 startdate="2000-01-01",
                 enddate="2020-01-01"):
        """Download one ECB series as CSV and return it as a DataFrame.

        Args:
            datacode: ECB dataset identifier (e.g. "ICP").
            key: full series key within the dataset.
            startdate: first period to include (YYYY-MM-DD).
            enddate: last period to include (YYYY-MM-DD).

        Returns:
            pandas.DataFrame parsed from the CSV response body.
        """
        endpoint = "{}{}/{}".format(self.url, datacode, key)
        agent = UserAgent(verify_ssl=False)
        response = requests.get(
            endpoint,
            params={"startPeriod": str(startdate), "endPeriod": str(enddate)},
            headers={"User-Agent": agent.random, 'Accept': 'text/csv'})
        return pd.read_csv(io.StringIO(response.content.decode('utf-8')))
class eurostat_data(object):
    """Thin client for the EuroStat bulk-download/JSON API rooted at url["eurostat"]."""
    def __init__(self, url=url["eurostat"]):
        # Base endpoint; the default is read from the module-level ``url``
        # mapping at class-definition time.
        self.url = url
    def codebook(self):
        """Return a pointer to the official EuroStat code lists."""
        return "please follow the EuroStat's codebook: \nhttps://ec.europa.eu/eurostat/estat-navtree-portlet-prod/BulkDownloadListing?sort=1&dir=dic"
    def get_data(self,
                 datasetcode="nama_10_gdp",
                 precision="1",
                 unit="CP_MEUR",
                 na_item="B1GQ",
                 time="2020"):
        """Fetch one EuroStat dataset slice and return it as a per-country DataFrame.

        Args:
            datasetcode: EuroStat dataset code (e.g. "nama_10_gdp").
            precision: decimal precision requested from the API.
            unit: unit-of-measure code (e.g. "CP_MEUR").
            na_item: national-accounts item code (e.g. "B1GQ" = GDP).
            time: reference year.

        Returns:
            pandas.DataFrame with a "Geo" column of country labels and one
            value column named after ``na_item``.
        """
        tmp_url = self.url + "{}".format(datasetcode)
        ua = UserAgent(verify_ssl=False)
        # NOTE(review): the header asks for text/csv but the response is
        # decoded as JSON below — presumably the API ignores Accept; confirm.
        request_header = {"User-Agent": ua.random, 'Accept': 'text/csv'}
        request_params = {
            "precision": "{}".format(precision),
            "unit": "{}".format(unit),
            "na_item": "{}".format(na_item),
            "time": "{}".format(time)
        }
        r = requests.get(
            tmp_url,
            params=request_params,
            headers=request_header)
        data_text = r.text
        data_json = demjson.decode(data_text)
        # 'value' maps positional index (as string keys) -> observation value.
        value = data_json['value']
        # 'index' maps geo code -> positional index; invert it so the
        # positional keys of 'value' can be mapped back to geo codes.
        abb = data_json['dimension']['geo']['category']['index']
        abb = {abb[k]: k for k in abb}
        geo = data_json['dimension']['geo']['category']['label']
        # Resolve each observation's position to its geo code, then to the
        # human-readable country label.
        geo_list = [abb[int(k)] for k in list(value.keys())]
        geo = [geo[k] for k in geo_list]
        df = pd.DataFrame(
            {"Geo": geo, "{}".format(na_item): list(value.values())})
        return df
def QtoM(data: pd.Series):
    """Convert quarter labels to end-of-quarter date strings.

    Accepts "YYYY-Qn"-style labels (passed straight to PeriodIndex) as well
    as "Qn_YY" labels, which are first rewritten to "20YY-Qn".

    Parameters
    ----------
    data : pd.Series of str
        Quarter labels, e.g. "2010-Q1" or "Q1_10".

    Returns
    -------
    Index of "%Y-%m-%d" strings -- the last calendar day of each quarter.
    """
    # regex=True is required: since pandas 2.0 ``Series.str.replace`` treats
    # the pattern as literal text by default, so the "Qn_YY" rewrite would
    # silently never happen.
    normalized = data.str.replace(r'(Q\d)_(\d+)', r'20\2-\1', regex=True)
    return pd.PeriodIndex(normalized, freq='Q').strftime('%Y-%m-%d')
# EU - Main Economic Indicator
# Module-level client instances shared by every sector class below.
ecb = ecb_data()
eurostat = eurostat_data()
# NOTE(review): despite the "_list" suffix this is a set literal, so element
# order is not preserved -- confirm no caller relies on ordering.
eu_columns_list = {
    "Gross Domestic Product", "Private Finance Consumption", "Government final consumption",
    "Gross fixed capital formation", "Changes in inventories and acquisition less disposals of valuables",
    "Exports of goods and services", "Imports of goods and services"
}
# https://www.ecb.europa.eu/stats/ecb_statistics/key_euro_area_indicators/html/index.en.html
# Shared sample window and the monthly date grid ("MS" = month start) that
# quarterly/yearly series are aligned onto via merge_asof.
startdate, enddate = "2010-01-01", "2021-01-01"
daterange = pd.DataFrame({"Date": pd.date_range(start=startdate, end=enddate, freq="MS")})
class real_sector():
    """Base class carrying the shared sample window and monthly date grid
    used by the real-economy indicator classes."""

    def __init__(self, startdate=startdate, enddate=enddate, daterange=daterange):
        # Defaults are the module-level window so all subclasses stay aligned.
        self.startdate, self.enddate, self.daterange = startdate, enddate, daterange
class current_price_gdp_by_expenditure_category(real_sector):
    ## National Account (current price)
    """Quarterly euro-area GDP and its expenditure components at current
    prices (ECB MNA dataset), aligned on the module-level monthly grid."""

    def __init__(self):
        super(current_price_gdp_by_expenditure_category, self).__init__()

    def _quarterly(self, key, colname):
        """Download one quarterly MNA series, parse its quarter labels and
        align it on ``self.daterange`` via a nearest-date asof merge."""
        df = ecb.get_data(datacode="MNA", key=key, startdate=self.startdate,
                          enddate=self.enddate)[["TIME_PERIOD", "OBS_VALUE"]]
        df.columns = ["Date", colname]
        # QtoM emits "%Y-%m-%d" end-of-quarter strings.
        df["Date"] = pd.to_datetime(QtoM(df["Date"]), format="%Y-%m-%d")
        return pd.merge_asof(self.daterange, df, on="Date", direction="nearest")

    def gdp(self):
        """
        * Title: Gross domestic product at market prices
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=MNA.Q.Y.I8.W2.S1.S1.B.B1GQ._Z._Z._Z.EUR.V.N
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Quarterly
        """
        return self._quarterly("Q.Y.I8.W2.S1.S1.B.B1GQ._Z._Z._Z.EUR.V.N", "EU_GDP")

    def pfc(self):
        """
        * Title: Private final consumption
        * URL: http://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=MNA.Q.Y.I8.W0.S1M.S1.D.P31._Z._Z._T.EUR.V.N
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Quarterly
        """
        return self._quarterly("Q.Y.I8.W0.S1M.S1.D.P31._Z._Z._T.EUR.V.N", "EU_PFC")

    def gfc(self):
        """
        * Title: Government final consumption
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=MNA.Q.Y.I8.W0.S13.S1.D.P3._Z._Z._T.EUR.V.N
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Quarterly

        Fixed: the original merged ``daterange`` with ``self.eu_gfc``, an
        attribute that never exists (AttributeError at call time).
        """
        return self._quarterly("Q.Y.I8.W0.S13.S1.D.P3._Z._Z._T.EUR.V.N", "EU_GFC")

    def gfcf(self):
        """
        * Title: Gross fixed capital formation
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=MNA.Q.Y.I8.W0.S1.S1.D.P51G.N11G._T._Z.EUR.V.N
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Quarterly
        """
        return self._quarterly("Q.Y.I8.W0.S1.S1.D.P51G.N11G._T._Z.EUR.V.N", "EU_GFCF")

    def cia(self):
        """
        * Title: Changes in inventories and acquisition less disposals of valuables
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=MNA.Q.Y.I8.W0.S1.S1.D.P5M.N1MG._T._Z.EUR.V.N
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Quarterly
        """
        return self._quarterly("Q.Y.I8.W0.S1.S1.D.P5M.N1MG._T._Z.EUR.V.N", "EU_CIA")

    def export(self):
        """
        * Title: Exports of goods and services
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=Q.Y.I8.W1.S1.S1.D.P6._Z._Z._Z.EUR.V.NN
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Quarterly

        Fixed: the original fell off the end without a ``return`` and so
        always yielded None.
        """
        return self._quarterly("Q.Y.I8.W1.S1.S1.D.P6._Z._Z._Z.EUR.V.N", "EU_EXPORT")

    def import_(self):
        """
        * Title: Imports of goods and services
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=Q.Y.I8.W1.S1.S1.C.P7._Z._Z._Z.EUR.V.N
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Quarterly
        """
        return self._quarterly("Q.Y.I8.W1.S1.S1.C.P7._Z._Z._Z.EUR.V.N", "EU_IMPORT")
class volume_gdp_by_expenditure_category_in_previous_year_price(real_sector):
    ## National Account (volume in previous year price)
    """Quarterly euro-area GDP components in volume terms (chain-linked
    index, previous-year prices) plus monthly labour and price indicators,
    aligned on the module-level monthly date grid where quarterly."""

    def __init__(self):
        super(volume_gdp_by_expenditure_category_in_previous_year_price, self).__init__()

    def _series(self, datacode, key, colname):
        """Download one ECB series and rename it to ["Date", colname]."""
        df = ecb.get_data(datacode=datacode, key=key, startdate=self.startdate,
                          enddate=self.enddate)[["TIME_PERIOD", "OBS_VALUE"]]
        df.columns = ["Date", colname]
        return df

    def _quarterly(self, datacode, key, colname):
        """Quarterly pipeline: parse quarter labels, snap to the first day of
        the quarter-end month, align on ``self.daterange`` (nearest merge)."""
        df = self._series(datacode, key, colname)
        # QtoM emits "%Y-%m-%d" strings, so parse with the full format --
        # the original format="%Y-%m" could not parse them ("unconverted
        # data remains").  MonthBegin(-1) snaps each quarter-end date back
        # to the start of its month.
        df["Date"] = pd.to_datetime(QtoM(df["Date"]), format="%Y-%m-%d") + pd.tseries.offsets.MonthBegin(-1)
        return pd.merge_asof(self.daterange, df, on="Date", direction="nearest")

    def gdp(self):
        """
        * Title: Gross domestic product at market prices
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=MNA.Q.Y.I8.W2.S1.S1.B.B1GQ._Z._Z._Z.IX.LR.N
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Quarterly
        """
        return self._quarterly("MNA", "Q.Y.I8.W2.S1.S1.B.B1GQ._Z._Z._Z.IX.LR.N", "EU_GDP")

    def pfc(self):
        """
        * Title: Private final consumption
        * URL: http://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=MNA.Q.Y.I8.W0.S1M.S1.D.P31._Z._Z._T.IX.LR.N
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Quarterly
        """
        return self._quarterly("MNA", "Q.Y.I8.W0.S1M.S1.D.P31._Z._Z._T.IX.LR.N", "EU_PFC")

    def gfc(self):
        """
        * Title: Government final consumption
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=MNA.Q.Y.I8.W0.S13.S1.D.P3._Z._Z._T.IX.LR.N
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Quarterly
        """
        return self._quarterly("MNA", "Q.Y.I8.W0.S13.S1.D.P3._Z._Z._T.IX.LR.N", "EU_GFC")

    def gfcf(self):
        """
        * Title: Gross fixed capital formation
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=MNA.Q.Y.I8.W0.S1.S1.D.P51G.N11G._T._Z.IX.LR.N
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Quarterly
        """
        return self._quarterly("MNA", "Q.Y.I8.W0.S1.S1.D.P51G.N11G._T._Z.IX.LR.N", "EU_GFCF")

    def export(self):
        """
        * Title: Exports of goods and services
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=Q.Y.I8.W1.S1.S1.D.P6._Z._Z._Z.IX.LR.N
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Quarterly

        Fixed: the original returned ``eu_gfcf``, an undefined local
        (NameError), instead of the exports frame.
        """
        return self._quarterly("MNA", "Q.Y.I8.W1.S1.S1.D.P6._Z._Z._Z.IX.LR.N", "EU_EXPORT")

    def import_(self):
        """
        * Title: Imports of goods and services
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=Q.Y.I8.W1.S1.S1.C.P7._Z._Z._Z.IX.LR.N
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Quarterly
        """
        return self._quarterly("MNA", "Q.Y.I8.W1.S1.S1.C.P7._Z._Z._Z.IX.LR.N", "EU_IMPORT")

    def industrial_production(self):
        """
        * Title: Industrial production for the euro area
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=132.STS.M.I8.Y.PROD.NS0020.4.000
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Monthly
        """
        # NOTE(review): a monthly series key pushed through the quarterly
        # QtoM pipeline, as in the original -- confirm this is intended.
        return self._quarterly("STS", "M.I8.Y.PROD.NS0020.4.000", "EU_INDUSTRIAL_PRODUCTION")

    def employment(self):
        """
        * Title: Employment (in thousands of persons)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=ENA.Q.Y.I8.W2.S1.S1._Z.EMP._Z._T._Z.PS._Z.N
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Quarterly
        """
        return self._quarterly("ENA", "Q.Y.I8.W2.S1.S1._Z.EMP._Z._T._Z.PS._Z.N", "EU_EMPLOYMENT")

    def unemployment(self):
        """
        * Title: Unemployment (in thousands of persons)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=LFSI.M.I8.S.UNEMPL.TOTAL0.15_74.T
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Monthly
        """
        df = self._series("LFSI", "M.I8.S.UNEMPL.TOTAL0.15_74.T", "EU_UNEMPLOYMENT")
        # NOTE(review): MonthBegin(-1) on a first-of-month timestamp rolls
        # back a full month (kept from the original) -- confirm intended.
        df["Date"] = pd.to_datetime(df["Date"], format="%Y-%m") + pd.tseries.offsets.MonthBegin(-1)
        return df

    def unemployment_rate(self):
        """
        * Title: Unemployment Rate
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=LFSI.M.I8.S.UNEHRT.TOTAL0.15_74.T
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Monthly
        """
        df = self._series("LFSI", "M.I8.S.UNEHRT.TOTAL0.15_74.T", "EU_UNEMPLOYMENT_RATE")
        df["Date"] = pd.to_datetime(df["Date"], format="%Y-%m-%d")
        return df

    def labour_cost_index(self):
        """
        * Title: Labour cost index
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=LCI.Q.I8.Y.LCI_T.BTN
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Quarterly
        """
        return self._quarterly("LCI", "Q.I8.Y.LCI_T.BTN", "EU_LABOUR_COST_INDEX")

    def hicp(self):
        """
        * Title: HICP - Overall index
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=ICP.M.U2.N.000000.4.INX
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Monthly
        """
        df = self._series("ICP", "M.U2.N.000000.4.INX", "EU_HICP")
        df["Date"] = pd.to_datetime(df["Date"], format="%Y-%m-%d")
        return df

    def ppi(self):
        """
        * Title: Industrial producer prices (excl. construction) for the euro area [PPI]
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=STS.M.I8.N.PRIN.NS0020.4.000
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Monthly
        """
        df = self._series("STS", "M.I8.N.PRIN.NS0020.4.000", "EU_PPI")
        df["Date"] = pd.to_datetime(df["Date"], format="%Y-%m-%d")
        return df
class fiscal_sector():
    """Base class carrying the shared sample window and monthly date grid
    used by the fiscal indicator classes."""

    def __init__(self, startdate=startdate, enddate=enddate, daterange=daterange):
        # Defaults are the module-level window so all subclasses stay aligned.
        self.startdate, self.enddate, self.daterange = startdate, enddate, daterange
class general_government_operation(fiscal_sector):
    ## National Account (current price)
    """Quarterly euro-area general-government flows (GFS dataset), all
    expressed as a percentage of GDP and aligned on the monthly grid."""

    def __init__(self):
        super(general_government_operation, self).__init__()

    def _share_of_gdp(self, key, colname):
        """Download one quarterly GFS series and align it on
        ``self.daterange`` via a nearest-date asof merge."""
        df = ecb.get_data(datacode="GFS", key=key, startdate=self.startdate,
                          enddate=self.enddate)[["TIME_PERIOD", "OBS_VALUE"]]
        df.columns = ["Date", colname]
        # QtoM emits "%Y-%m-%d" strings; the original format="%Y-%m" could
        # not parse them ("unconverted data remains").  MonthBegin(-1)
        # snaps each quarter-end date to the start of its month.
        df["Date"] = pd.to_datetime(QtoM(df["Date"]), format="%Y-%m-%d") + pd.tseries.offsets.MonthBegin(-1)
        return pd.merge_asof(self.daterange, df, on="Date", direction="nearest")

    def revenue(self):
        """
        * Title: Government total revenue (as % of GDP)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=325.GFS.Q.N.I8.W0.S13.S1.P.C.OTR._Z._Z._Z.XDC_R_B1GQ_CY._Z.S.V.CY._T
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Quarterly
        """
        return self._share_of_gdp("Q.N.I8.W0.S13.S1.P.C.OTR._Z._Z._Z.XDC_R_B1GQ_CY._Z.S.V.CY._T", "EU_GOVERNMENT_TOTAL_REVENUE")

    def expenditure(self):
        """
        * Title: Government total expenditure (as % of GDP)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=325.GFS.Q.N.I8.W0.S13.S1.P.D.OTE._Z._Z._T.XDC_R_B1GQ_CY._Z.S.V.CY._T
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Quarterly
        """
        return self._share_of_gdp("Q.N.I8.W0.S13.S1.P.D.OTE._Z._Z._T.XDC_R_B1GQ_CY._Z.S.V.CY._T", "EU_GOVERNMENT_TOTAL_EXPENDITURE")

    def interest_expenditure(self):
        """
        * Title: Government interest expenditure (as % of GDP)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=325.GFS.Q.N.I8.W0.S13.S1.C.D.D41._Z._Z._T.XDC_R_B1GQ_CY._Z.S.V.CY._T
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Quarterly
        """
        return self._share_of_gdp("Q.N.I8.W0.S13.S1.C.D.D41._Z._Z._T.XDC_R_B1GQ_CY._Z.S.V.CY._T", "EU_GOVERNMENT_INTEREST_EXPENDITURE")

    def investment_expenditure(self):
        """
        * Title: Government investment expenditure (as % of GDP)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=325.GFS.Q.N.I8.W0.S13.S1.N.D.P51G._Z._Z._T.XDC_R_B1GQ_CY._Z.S.V.CY._T
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Quarterly
        """
        return self._share_of_gdp("Q.N.I8.W0.S13.S1.N.D.P51G._Z._Z._T.XDC_R_B1GQ_CY._Z.S.V.CY._T", "EU_GOVERNMENT_INVESTMENT_EXPENDITURE")

    def balance(self):
        """
        * Title: Government deficit(-) or surplus(+) (as % of GDP)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=325.GFS.Q.N.I8.W0.S13.S1._Z.B.B9._Z._Z._Z.XDC_R_B1GQ_CY._Z.S.V.CY._T
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Quarterly
        """
        return self._share_of_gdp("Q.N.I8.W0.S13.S1._Z.B.B9._Z._Z._Z.XDC_R_B1GQ_CY._Z.S.V.CY._T", "EU_GOVERNMENT_BALANCE")
class general_government_debt(fiscal_sector):
    ## National Account (current price)
    """Yearly euro-area general-government debt breakdowns (GFS dataset),
    expressed as a percentage of GDP and aligned on the monthly grid."""

    def __init__(self):
        super(general_government_debt, self).__init__()

    def _debt_share(self, key, colname):
        """Download one yearly GFS debt series and align it on
        ``self.daterange`` via a nearest-date asof merge."""
        df = ecb.get_data(datacode="GFS", key=key, startdate=self.startdate,
                          enddate=self.enddate)[["TIME_PERIOD", "OBS_VALUE"]]
        df.columns = ["Date", colname]
        df["Date"] = pd.to_datetime(df["Date"], format="%Y") + pd.tseries.offsets.MonthBegin(-1)
        return pd.merge_asof(self.daterange, df, on="Date", direction="nearest")

    def gross_outstanding_debt_total(self):
        """
        * Title: Government debt (consolidated) (as % of GDP)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=325.GFS.A.N.I8.W0.S13.S1.C.L.LE.GD.T._Z.XDC_R_B1GQ._T.F.V.N._T
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Yearly
        """
        return self._debt_share("A.N.I8.W0.S13.S1.C.L.LE.GD.T._Z.XDC_R_B1GQ._T.F.V.N._T", "EU_GOVERNMENT_OUT_STANDING_DEBT_TOTAL")

    def gross_outstanding_debt_in_euro(self):
        """
        * Title: Government debt denominated in national currency and euro (as % of GDP)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=325.GFS.A.N.I8.W0.S13.S1.C.L.LE.GD.T._Z.XDC_R_B1GQ.EUR.F.V.N._T
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Yearly
        """
        return self._debt_share("A.N.I8.W0.S13.S1.C.L.LE.GD.T._Z.XDC_R_B1GQ.EUR.F.V.N._T", "EU_GOVERNMENT_OUT_STANDING_DEBT_IN_EURO")

    def gross_outstanding_debt_no_euro(self):
        """
        * Title: Government debt denominated in currencies other than national currency and euro (as % of GDP)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=325.GFS.A.N.I8.W0.S13.S1.C.L.LE.GD.T._Z.XDC_R_B1GQ.XNC.F.V.N._T
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Yearly
        """
        return self._debt_share("A.N.I8.W0.S13.S1.C.L.LE.GD.T._Z.XDC_R_B1GQ.XNC.F.V.N._T", "EU_GOVERNMENT_OUT_STANDING_DEBT_NO_EURO")

    def gross_outstanding_debt_resident(self):
        """
        * Title: Government debt held by residents (as % of GDP)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=325.GFS.A.N.I8.W2.S13.S1.C.L.LE.GD.T._Z.XDC_R_B1GQ._T.F.V.N._T
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Yearly
        """
        return self._debt_share("A.N.I8.W2.S13.S1.C.L.LE.GD.T._Z.XDC_R_B1GQ._T.F.V.N._T", "EU_GOVERNMENT_OUT_STANDING_DEBT_RESIDENTS")

    def gross_outstanding_debt_mfi(self):
        """
        * Title: Government debt held by monetary financial institutions (as % of GDP)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=325.GFS.A.N.I8.W2.S13.S12K.C.L.LE.GD.T._Z.XDC_R_B1GQ._T.F.V.N._T
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Yearly
        """
        return self._debt_share("A.N.I8.W2.S13.S12K.C.L.LE.GD.T._Z.XDC_R_B1GQ._T.F.V.N._T", "EU_GOVERNMENT_OUT_STANDING_DEBT_MFI")

    def gross_outstanding_debt_non_mfi(self):
        """
        * Title: Government debt held by financial institutions other than monetary financial institutions (as % of GDP)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=325.GFS.A.N.I8.W2.S13.S12P.C.L.LE.GD.T._Z.XDC_R_B1GQ._T.F.V.N._T
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Yearly
        """
        return self._debt_share("A.N.I8.W2.S13.S12P.C.L.LE.GD.T._Z.XDC_R_B1GQ._T.F.V.N._T", "EU_GOVERNMENT_OUT_STANDING_DEBT_NON_MFI")

    def gross_outstanding_debt_non_fin_sector(self):
        """
        * Title: Government debt held by the non-financial sectors (as % of GDP)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=325.GFS.A.N.I8.W2.S13.S1U.C.L.LE.GD.T._Z.XDC_R_B1GQ._T.F.V.N._T
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Yearly
        """
        return self._debt_share("A.N.I8.W2.S13.S1U.C.L.LE.GD.T._Z.XDC_R_B1GQ._T.F.V.N._T", "EU_GOVERNMENT_OUT_STANDING_DEBT_NON_FIN_SECTOR")

    def gross_outstanding_debt_non_resident(self):
        """
        * Title: Government debt held by non-residents (as % of GDP)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=325.GFS.A.N.I8.W1.S13.S1.C.L.LE.GD.T._Z.XDC_R_B1GQ._T.F.V.N._T
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Yearly
        """
        return self._debt_share("A.N.I8.W1.S13.S1.C.L.LE.GD.T._Z.XDC_R_B1GQ._T.F.V.N._T", "EU_GOVERNMENT_OUT_STANDING_DEBT_NON_RESIDENT")
class financial_sector():
    """Base class carrying the shared sample window and monthly date grid
    used by the financial indicator classes."""

    def __init__(self, startdate=startdate, enddate=enddate, daterange=daterange):
        # Defaults are the module-level window so all subclasses stay aligned.
        self.startdate, self.enddate, self.daterange = startdate, enddate, daterange
class analytical_accounts_of_the_banking_sector(financial_sector):
    ## National Account (current price)
    """Monthly euro-area MFI balance-sheet (BSI) stocks: monetary
    aggregates, credit aggregates and external positions."""

    def __init__(self):
        super(analytical_accounts_of_the_banking_sector, self).__init__()

    def _monthly_stock(self, key, colname):
        """Download one monthly BSI stock series as ["Date", colname]."""
        df = ecb.get_data(datacode="BSI", key=key, startdate=self.startdate,
                          enddate=self.enddate)[["TIME_PERIOD", "OBS_VALUE"]]
        df.columns = ["Date", colname]
        df["Date"] = pd.to_datetime(df["Date"], format="%Y-%m-%d")
        return df

    def monetary_aggrate_m1(self):
        """
        * Title: Monetary aggregate M1 vis-a-vis euro area non-MFI excl. central gov. reported by MFI & central gov. & post office giro Inst. in the euro area (stock)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=BSI.M.U2.Y.V.M10.X.1.U2.2300.Z01.E
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Monthly

        Fixed: the original fetched the M3 series key (M30) and labelled
        the column EU_MONETARY_AGGRATE_M3 despite documenting M1.
        """
        return self._monthly_stock("M.U2.Y.V.M10.X.1.U2.2300.Z01.E", "EU_MONETARY_AGGRATE_M1")

    def monetary_aggrate_m2(self):
        """
        * Title: Monetary aggregate M2 vis-a-vis euro area non-MFI excl. central gov. reported by MFI & central gov. & post office giro Inst. in the euro area (stock)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=BSI.M.U2.Y.V.M20.X.1.U2.2300.Z01.E
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Monthly

        Fixed: the original labelled the column EU_MONETARY_AGGRATE_M3.
        """
        return self._monthly_stock("M.U2.Y.V.M20.X.1.U2.2300.Z01.E", "EU_MONETARY_AGGRATE_M2")

    def monetary_aggrate_m3(self):
        """
        * Title: Monetary aggregate M3 vis-a-vis euro area non-MFI excl. central gov. reported by MFI & central gov. & post office giro Inst. in the euro area (stock)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=BSI.M.U2.Y.V.M30.X.1.U2.2300.Z01.E
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Monthly
        """
        return self._monthly_stock("M.U2.Y.V.M30.X.1.U2.2300.Z01.E", "EU_MONETARY_AGGRATE_M3")

    def domestic_credit(self):
        """
        * Title: Total loans and securities vis-a-vis euro area non-MFI reported by MFI in the euro area (stock)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=BSI.M.U2.Y.U.AT2.A.1.U2.2000.Z01.E
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Monthly
        """
        return self._monthly_stock("M.U2.Y.U.AT2.A.1.U2.2000.Z01.E", "EU_DOMESTIC_CREDIT")

    def credit_general_government(self):
        """
        * Title: Total loans and securities vis-a-vis euro area General Government reported by MFI in the euro area (stock)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=BSI.M.U2.Y.U.AT2.A.1.U2.2100.Z01.E
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Monthly
        """
        return self._monthly_stock("M.U2.Y.U.AT2.A.1.U2.2100.Z01.E", "EU_GOVERNMENT_CREDIT")

    def credit_general_other_resident(self):
        """
        * Title: Total loans and securities vis-a-vis euro area non-MFI excl. general gov. reported by MFI in the euro area (stock)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=BSI.M.U2.Y.U.AT2.A.1.U2.2200.Z01.E
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Monthly
        """
        return self._monthly_stock("M.U2.Y.U.AT2.A.1.U2.2200.Z01.E", "EU_OTHER_RESIDENT_CREDIT")

    def external_assets(self):
        """
        * Title: External assets reported by MFI in the euro area (stock)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=BSI.M.U2.Y.U.AXG.A.1.U4.0000.Z01.E
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Monthly
        """
        return self._monthly_stock("M.U2.Y.U.AXG.A.1.U4.0000.Z01.E", "EU_EXTERNAL_ASSETS")

    def external_liabilities(self):
        """
        * Title: External liabilities reported by MFI in the euro area (stock)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=BSI.M.U2.Y.U.LXG.A.1.U4.0000.Z01.E
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Monthly
        """
        return self._monthly_stock("M.U2.Y.U.LXG.A.1.U4.0000.Z01.E", "EU_EXTERNAL_LIABILITIES")
class analytical_accounts_of_the_central_banks(financial_sector):
    """Analytical accounts of the Eurosystem central banks (ECB SDW, BSI dataset).

    Each method downloads one monthly euro-area series and returns a
    DataFrame with two columns: ``Date`` (datetime64) and the series value.
    """

    def __init__(self):
        # BUG FIX: the original called
        # super(analytical_accounts_of_the_banking_sector, self) — an
        # unrelated class name that raised NameError on instantiation.
        super(analytical_accounts_of_the_central_banks, self).__init__()

    def _monthly_series(self, key, column_name):
        # Shared fetch/normalise step for all BSI monthly series below.
        frame = ecb.get_data(datacode="BSI", key=key, startdate=self.startdate, enddate=self.enddate)[["TIME_PERIOD", "OBS_VALUE"]]
        frame.columns = ["Date", column_name]
        frame["Date"] = pd.to_datetime(frame["Date"], format="%Y-%m-%d")
        return frame

    def currency_in_circulation(self):
        """
        * Title: Currency in circulation reported by Eurosystem in the euro area (stock)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=117.BSI.M.U2.N.C.L10.X.1.Z5.0000.Z01.E
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Monthly
        """
        return self._monthly_series("M.U2.N.C.L10.X.1.Z5.0000.Z01.E", "EU_CURRENCY_IN_CIRCULATION")

    def deposits_at_eurosystem_mfi(self):
        """
        * Title: Deposit liabilities vis-a-vis euro area MFI reported by Eurosystem in the euro area (stock)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=117.BSI.M.U2.N.C.L20.A.1.U2.1000.Z01.E
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Monthly
        """
        return self._monthly_series("M.U2.N.C.L20.A.1.U2.1000.Z01.E", "EU_DEPOSITS_AT_EUROSYSTEM_MFI")

    def credit(self):
        """
        * Title: Total loans and securities vis-a-vis euro area non-MFI reported by Eurosystem in the euro area (stock)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=117.BSI.M.U2.N.C.AT2.A.1.U2.2000.Z01.E
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Monthly
        """
        return self._monthly_series("M.U2.N.C.AT2.A.1.U2.2000.Z01.E", "EU_CREDIT")

    def credit_to_general_governemnt(self):
        """
        * Title: Total loans and securities vis-a-vis euro area general government reported by Eurosystem (stock)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=117.BSI.M.U2.N.C.AT2.A.1.U2.2100.Z01.E
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Monthly
        NOTE(review): method name misspells "government"; kept for backward
        compatibility with existing callers.
        """
        return self._monthly_series("M.U2.N.C.AT2.A.1.U2.2100.Z01.E", "EU_CREDIT_TO_GENERAL_GOVERNMENT")

    def credit_to_other_resident_sector(self):
        """
        * Title: Total loans and securities vis-a-vis euro area other residents reported by Eurosystem (stock)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=117.BSI.M.U2.N.C.AT2.A.1.U2.2200.Z01.E
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Monthly
        """
        return self._monthly_series("M.U2.N.C.AT2.A.1.U2.2200.Z01.E", "EU_CREDIT_TO_OTHER_RESIDENT")

    def external_assets(self):
        """
        * Title: External assets reported by Eurosystem in the euro area (stock)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=BSI.M.U2.N.C.AXG.A.1.U4.0000.Z01.E
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Monthly
        """
        return self._monthly_series("M.U2.N.C.AXG.A.1.U4.0000.Z01.E", "EU_EXTERNAL_ASSETS")

    def external_liabilities(self):
        """
        * Title: External liabilities reported by Eurosystem in the euro area (stock)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=BSI.M.U2.N.C.LXG.A.1.U4.0000.Z01.E
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Monthly
        """
        return self._monthly_series("M.U2.N.C.LXG.A.1.U4.0000.Z01.E", "EU_EXTERNAL_LIABILITIES")
class interest_rate(financial_sector):
    """Euro-area money-market and bond-market reference rates (ECB SDW, FM dataset)."""

    def __init__(self):
        super(interest_rate, self).__init__()

    def one_year_interbank(self):
        """
        * Title: Euribor 1-year - Historical close, average of observations through period
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=143.FM.M.U2.EUR.RT.MM.EURIBOR1YD_.HSTA
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Monthly
        """
        series = ecb.get_data(
            datacode="FM",
            key="M.U2.EUR.RT.MM.EURIBOR1YD_.HSTA",
            startdate=self.startdate,
            enddate=self.enddate,
        )[["TIME_PERIOD", "OBS_VALUE"]]
        series.columns = ["Date", "EU_ONE_YEAR_INTERBANK_RATE"]
        series["Date"] = pd.to_datetime(series["Date"], format="%Y-%m-%d")
        return series

    def ten_year_government_banchmar_bond_yild(self):
        """
        * Title: Euro area 10-year Government Benchmark bond yield - Yield
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=143.FM.M.U2.EUR.4F.BB.U2_10Y.YLD
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Monthly
        """
        # Method name is misspelled but kept unchanged: it is public API.
        series = ecb.get_data(
            datacode="FM",
            key="M.U2.EUR.4F.BB.U2_10Y.YLD",
            startdate=self.startdate,
            enddate=self.enddate,
        )[["TIME_PERIOD", "OBS_VALUE"]]
        series.columns = ["Date", "EU_TEN_YEART_BOND_RATE"]
        series["Date"] = pd.to_datetime(series["Date"], format="%Y-%m-%d")
        return series
class external_sector():
    # Base class for all external-sector data downloaders: it only stores the
    # shared query window. Defaults come from the module-level globals
    # `startdate`, `enddate` and `daterange` defined earlier in this file.
    def __init__(self, startdate=startdate, enddate=enddate, daterange=daterange):
        self.startdate = startdate
        self.enddate = enddate
        self.daterange = daterange
class balance_of_payments(external_sector):
    """Euro-area balance-of-payments series (ECB SDW, BP6 dataset), monthly.

    Each method returns a DataFrame with a ``Date`` column (datetime64) and
    one or more value columns.

    Bug fixes vs. the original:
    * exports_goods returned ``eu_ca`` (undefined in that scope) instead of
      its own frame;
    * exports_services fetched into ``eu_eg`` but then used ``eu_es``
      (NameError);
    * imports_goods returned ``eu_ca`` instead of its own frame.
    """

    def __init__(self):
        super(balance_of_payments, self).__init__()

    def _monthly_series(self, key, column_name):
        # Shared fetch/normalise step for all BP6 monthly series below.
        frame = ecb.get_data(datacode="BP6", key=key, startdate=self.startdate, enddate=self.enddate)[["TIME_PERIOD", "OBS_VALUE"]]
        frame.columns = ["Date", column_name]
        frame["Date"] = pd.to_datetime(frame["Date"], format="%Y-%m-%d")
        return frame

    def net_current_account(self):
        """
        * Title: Current account
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=338.BP6.M.Y.I8.W1.S1.S1.T.B.CA._Z._Z._Z.EUR._T._X.N
        * Frequency: Monthly
        """
        return self._monthly_series("M.Y.I8.W1.S1.S1.T.B.CA._Z._Z._Z.EUR._T._X.N", "EU_CURRENT_ACCOUNT")

    def exports_goods(self):
        """
        * Title: Goods (credit side)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=338.BP6.M.Y.I8.W1.S1.S1.T.C.G._Z._Z._Z.EUR._T._X.N
        * Frequency: Monthly
        """
        # BUG FIX: original returned eu_ca (a different method's frame).
        return self._monthly_series("M.Y.I8.W1.S1.S1.T.C.G._Z._Z._Z.EUR._T._X.N", "EU_EXPORT_GOODS")

    def exports_services(self):
        """
        * Title: Services (credit side)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=338.BP6.M.Y.I8.W1.S1.S1.T.C.S._Z._Z._Z.EUR._T._X.N
        * Frequency: Monthly
        """
        # BUG FIX: original fetched into eu_eg but then referenced eu_es.
        return self._monthly_series("M.Y.I8.W1.S1.S1.T.C.S._Z._Z._Z.EUR._T._X.N", "EU_EXPORT_SERVICES")

    def imports_goods(self):
        """
        * Title: Goods (debit side)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=338.BP6.M.Y.I8.W1.S1.S1.T.D.G._Z._Z._Z.EUR._T._X.N
        * Frequency: Monthly
        """
        # BUG FIX: original returned eu_ca (a different method's frame).
        return self._monthly_series("M.Y.I8.W1.S1.S1.T.D.G._Z._Z._Z.EUR._T._X.N", "EU_IMPORT_GOODS")

    def import_services(self):
        """
        * Title: Services (debit side)
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=338.BP6.M.Y.I8.W1.S1.S1.T.D.S._Z._Z._Z.EUR._T._X.N
        * Frequency: Monthly
        """
        return self._monthly_series("M.Y.I8.W1.S1.S1.T.D.S._Z._Z._Z.EUR._T._X.N", "EU_IMPORT_SERVICES")

    def net_primary_income(self):
        """
        * Title: Primary income
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=338.BP6.M.Y.I8.W1.S1.S1.T.B.IN1._Z._Z._Z.EUR._T._X.N
        * Frequency: Monthly
        """
        return self._monthly_series("M.Y.I8.W1.S1.S1.T.B.IN1._Z._Z._Z.EUR._T._X.N", "EU_NET_PRIMARY_INCOME")

    def net_secondary_income(self):
        """
        * Title: Secondary income
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=338.BP6.M.Y.I8.W1.S1.S1.T.B.IN2._Z._Z._Z.EUR._T._X.N
        * Frequency: Monthly
        """
        return self._monthly_series("M.Y.I8.W1.S1.S1.T.B.IN2._Z._Z._Z.EUR._T._X.N", "EU_NET_SECONDARY_INCOME")

    def net_capital_account(self):
        """
        * Title: Capital account
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=338.BP6.M.N.I8.W1.S1.S1.T.B.KA._Z._Z._Z.EUR._T._X.N
        * Frequency: Monthly
        """
        # NOTE(review): column label misspells "CAPITAL"; kept unchanged so
        # downstream consumers of the column name keep working.
        return self._monthly_series("M.N.I8.W1.S1.S1.T.B.KA._Z._Z._Z.EUR._T._X.N", "EU_NET_CAPTIAL_ACCOUNT")

    def net_financial_account(self):
        """
        * Title: Total financial assets/liabilities
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=338.BP6.M.N.I8.W1.S1.S1.T.N.FA._T.F._Z.EUR._T._X.N
        * Frequency: Monthly
        """
        return self._monthly_series("M.N.I8.W1.S1.S1.T.N.FA._T.F._Z.EUR._T._X.N", "EU_NET_FINANCIAL_ACCOUNT")

    def direct_investment(self):
        """
        * Title: Direct Investment, Total financial assets/liabilities
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=338.BP6.M.N.I8.W1.S1.S1.T.N.FA.D.F._Z.EUR._T._X.N
        * Frequency: Monthly
        """
        eu_di = self._monthly_series("M.N.I8.W1.S1.S1.T.N.FA.D.F._Z.EUR._T._X.N", "EU_DIRECT_INVESTMENT")
        eu_dia = ecb.get_data(datacode="BP6", key="M.N.I8.W1.S1.S1.T.A.FA.D.F._Z.EUR._T._X.N", startdate=self.startdate, enddate=self.enddate)[["TIME_PERIOD", "OBS_VALUE"]]
        eu_dil = ecb.get_data(datacode="BP6", key="M.N.I8.W1.S1.S1.T.L.FA.D.F._Z.EUR._T._X.N", startdate=self.startdate, enddate=self.enddate)[["TIME_PERIOD", "OBS_VALUE"]]
        # NOTE(review): positional column assignment assumes all three series
        # cover the same periods in the same order — confirm with the API.
        eu_di["EU_DIRECT_INVESTMENT_ASSETS"] = eu_dia["OBS_VALUE"]
        eu_di["EU_DIRECT_INVESTMENT_LIABILITIES"] = eu_dil["OBS_VALUE"]
        return eu_di

    def porfolio_investment(self):
        """
        * Title: Portfolio Investment, Total financial assets/liabilities
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=338.BP6.M.N.I8.W1.S1.S1.T.N.FA.P.F._Z.EUR._T.M.N
        * Frequency: Monthly
        NOTE(review): method and column names misspell "portfolio"; kept for
        backward compatibility.
        """
        eu_pi = self._monthly_series("M.N.I8.W1.S1.S1.T.N.FA.P.F._Z.EUR._T.M.N", "EU_PORFOLIO_INVESTMENT")
        eu_pia = ecb.get_data(datacode="BP6", key="M.N.I8.W1.S1.S1.T.A.FA.P.F._Z.EUR._T.M.N", startdate=self.startdate, enddate=self.enddate)[["TIME_PERIOD", "OBS_VALUE"]]
        eu_pil = ecb.get_data(datacode="BP6", key="M.N.I8.W1.S1.S1.T.L.FA.P.F._Z.EUR._T._X.N", startdate=self.startdate, enddate=self.enddate)[["TIME_PERIOD", "OBS_VALUE"]]
        eu_pi["EU_PORFOLIO_INVESTMENT_ASSETS"] = eu_pia["OBS_VALUE"]
        eu_pi["EU_PORFOLIO_INVESTMENT_LIABILITIES"] = eu_pil["OBS_VALUE"]
        return eu_pi

    def other_investment(self):
        """
        * Title: Other Investment, Total financial assets/liabilities
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=338.BP6.M.N.I8.W1.S1.S1.T.N.FA.O.F._Z.EUR._T._X.N
        * Frequency: Monthly
        """
        eu_oi = self._monthly_series("M.N.I8.W1.S1.S1.T.N.FA.O.F._Z.EUR._T._X.N", "EU_OTHER_INVESTMENT")
        eu_oia = ecb.get_data(datacode="BP6", key="M.N.I8.W1.S1.S1.T.A.FA.O.F._Z.EUR._T._X.N", startdate=self.startdate, enddate=self.enddate)[["TIME_PERIOD", "OBS_VALUE"]]
        eu_oil = ecb.get_data(datacode="BP6", key="M.N.I8.W1.S1.S1.T.L.FA.O.F._Z.EUR._T._X.N", startdate=self.startdate, enddate=self.enddate)[["TIME_PERIOD", "OBS_VALUE"]]
        eu_oi["EU_OTHER_INVESTMENT_ASSETS"] = eu_oia["OBS_VALUE"]
        eu_oi["EU_OTHER_INVESTMENT_LIABILITIES"] = eu_oil["OBS_VALUE"]
        return eu_oi

    def financial_derivatives(self):
        """
        * Title: Financial Derivatives and Employee Stock Options
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=338.BP6.M.N.I8.W1.S1.S1.T.N.FA.F.F7.T.EUR._T.T.N
        * Frequency: Monthly
        """
        return self._monthly_series("M.N.I8.W1.S1.S1.T.N.FA.F.F7.T.EUR._T.T.N", "EU_FINANCIAL_DERIVATIVES")

    def reserve_assets(self):
        """
        * Title: Reserve Assets, Total financial assets/liabilities
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=338.BP6.M.N.I8.W1.S121.S1.T.A.FA.R.F._Z.EUR.X1._X.N
        * Frequency: Monthly
        """
        return self._monthly_series("M.N.I8.W1.S121.S1.T.A.FA.R.F._Z.EUR.X1._X.N", "EU_RESERVE_ASSETS")
class international_reserves_and_foreign_currency_liquidity(external_sector):
    """Euro-area international reserves and FX liquidity (ECB SDW, RA6 dataset).

    Each method downloads one monthly series and returns a DataFrame with
    a ``Date`` column (datetime64) and one value column.
    """

    def __init__(self):
        # BUG FIX: the original called super(balance_of_payments, self) —
        # the wrong class — which breaks the cooperative __init__ chain.
        super(international_reserves_and_foreign_currency_liquidity, self).__init__()

    def _monthly_series(self, key, column_name):
        # Shared fetch/normalise step for all RA6 monthly series below.
        frame = ecb.get_data(datacode="RA6", key=key, startdate=self.startdate, enddate=self.enddate)[["TIME_PERIOD", "OBS_VALUE"]]
        frame.columns = ["Date", column_name]
        frame["Date"] = pd.to_datetime(frame["Date"], format="%Y-%m-%d")
        return frame

    def official_reserve_assets(self):
        """
        * Title: Official reserve assets
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=340.RA6.M.N.U2.W1.S121.S1.LE.A.FA.R.F._Z.EUR.X1._X.N
        * Frequency: Monthly
        """
        return self._monthly_series("M.N.U2.W1.S121.S1.LE.A.FA.R.F._Z.EUR.X1._X.N", "EU_OFFICIAL_RESERVE_ASSETS")

    def monetary_gold(self):
        """
        * Title: Monetary gold
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=340.RA6.M.N.U2.W1.S121.S1.LE.A.FA.R.F11._Z.EUR.XAU.M.N
        * Frequency: Monthly
        """
        return self._monthly_series("M.N.U2.W1.S121.S1.LE.A.FA.R.F11._Z.EUR.XAU.M.N", "EU_MONETARY_GOLD")

    def imf_reserve_position(self):
        """
        * Title: Reserve position in the IMF
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=340.RA6.M.N.U2.1C.S121.S121.LE.A.FA.R.FK._Z.EUR.XDR.M.N
        * Frequency: Monthly
        """
        return self._monthly_series("M.N.U2.1C.S121.S121.LE.A.FA.R.FK._Z.EUR.XDR.M.N", "EU_IMF_RESERVE_POSITION")

    def sdr(self):
        """
        * Title: Special drawing rights (SDR) holdings
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=340.RA6.M.N.U2.W1.S121.S1N.LE.A.FA.R.F12.T.EUR.XDR.M.N
        * Frequency: Monthly
        """
        return self._monthly_series("M.N.U2.W1.S121.S1N.LE.A.FA.R.F12.T.EUR.XDR.M.N", "EU_SDR")

    def other_reserve_assets(self):
        """
        * Title: Other reserve assets
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=340.RA6.M.N.U2.W1.S121.S1.LE.A.FA.R.FR2._Z.EUR.X1._X.N
        * Frequency: Monthly
        """
        return self._monthly_series("M.N.U2.W1.S121.S1.LE.A.FA.R.FR2._Z.EUR.X1._X.N", "EU_OTHER_RESERVE_ASSETS")

    def other_foreign_currency_assets(self):
        """
        * Title: Other foreign currency assets (not included in reserve assets)
        * URL: http://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=340.RA6.M.N.U2.W0.S121.S1.LE.A.FA.RT.F._Z.EUR.X1._X.N
        * Frequency: Monthly
        """
        return self._monthly_series("M.N.U2.W0.S121.S1.LE.A.FA.RT.F._Z.EUR.X1._X.N", "EU_FOREIGN_CURRENCY_ASSETS")

    def predeterminated_short_term_net_drains_on_foreign_currency_assets(self):
        """
        * Title: Predetermined short-term net drains on foreign currency assets
        * URL: http://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=340.RA6.M.N.U2.W0.S121.S1.FP.FN._Z.RT.F.TS.EUR.X1.N.N
        * Frequency: Monthly
        """
        return self._monthly_series("M.N.U2.W0.S121.S1.FP.FN._Z.RT.F.TS.EUR.X1.N.N", "EU_TOTAL_FINANCIAL_ASSETS")
class merchandise_trade(external_sector):
    """Euro-area merchandise trade values (ECB SDW, TRD dataset), monthly."""

    def __init__(self):
        super(merchandise_trade, self).__init__()

    def merchandise_trade(self):
        """
        * Title: Total trade, Value (Community concept) (Export/Import)
        * URL1: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=133.TRD.M.I8.Y.X.TTT.J8.4.VAL
        * URL2: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=133.TRD.M.I8.Y.M.TTT.J8.4.VAL
        * Reference area: Euro area 19 (fixed composition) as of 1 January 2015 (I8)
        * Frequency: Monthly
        """
        exports = ecb.get_data(
            datacode="TRD",
            key="M.I8.Y.X.TTT.J8.4.VAL",
            startdate=self.startdate,
            enddate=self.enddate,
        )[["TIME_PERIOD", "OBS_VALUE"]]
        imports_ = ecb.get_data(
            datacode="TRD",
            key="M.I8.Y.M.TTT.J8.4.VAL",
            startdate=self.startdate,
            enddate=self.enddate,
        )[["TIME_PERIOD", "OBS_VALUE"]]
        exports.columns = ["Date", "EU_MERCHANDISE_TRADE_EXPORT"]
        exports["Date"] = pd.to_datetime(exports["Date"], format="%Y-%m-%d")
        # Import values are attached positionally, matching the export rows.
        exports["EU_MERCHANDISE_TRADE_IMPORT"] = imports_["OBS_VALUE"]
        return exports
class international_investment_position(external_sector):
    """Euro-area international investment position (ECB SDW, BP6 dataset), quarterly.

    Quarterly ``TIME_PERIOD`` labels are mapped to month starts via the
    module-level ``QtoM`` helper, so IIP frames can be merged with the
    monthly series elsewhere in this module.

    Bug fixes vs. the original:
    * portfolio_investment converted the dates of ``eu_pida``/``eu_pidl``
      from ``eu_pil``'s column (copy-paste error) and mislabelled the debt
      securities assets column ("...aSSETS");
    * other_investment labelled its columns EU_DIRECT_INVESTMENT* and
      returned the undefined name ``eu_di`` (NameError);
    * reserve_assetsc labelled its column EU_OTHER_INVESTMENT_ASSETS.
    """

    def __init__(self):
        super(international_investment_position, self).__init__()

    def _quarterly_series(self, key, column_name):
        # Fetch one quarterly BP6 series and normalise it to
        # ["Date", column_name] with month-start timestamps.
        frame = ecb.get_data(datacode="BP6", key=key, startdate=self.startdate, enddate=self.enddate)[["TIME_PERIOD", "OBS_VALUE"]]
        frame.columns = ["Date", column_name]
        frame["Date"] = pd.to_datetime(QtoM(frame["Date"]), format="%Y-%m") + pd.tseries.offsets.MonthBegin(-1)
        return frame

    def total_net_international_investment_position(self):
        """
        * Title: Total financial assets/liabilities
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=338.BP6.Q.N.I8.W1.S1.S1.LE.N.FA._T.F._Z.EUR._T._X.N
        * Frequency: Quarterly
        """
        return self._quarterly_series("Q.N.I8.W1.S1.S1.LE.N.FA._T.F._Z.EUR._T._X.N", "EU_TOTAL_FINANCIAL_ASSETS")

    def direct_investment(self):
        """
        * Title: Direct Investment, Total financial assets/liabilities
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=338.BP6.Q.N.I8.W1.S1.S1.LE.N.FA.D.F._Z.EUR._T._X.N
        * Frequency: Quarterly
        """
        eu_di = self._quarterly_series("Q.N.I8.W1.S1.S1.LE.N.FA.D.F._Z.EUR._T._X.N", "EU_DIRECT_INVESTMENT")
        eu_dia = self._quarterly_series("Q.N.I8.W1.S1.S1.LE.A.FA.D.F._Z.EUR._T._X.N", "EU_DIRECT_INVESTMENT_ASSETS")
        eu_dil = self._quarterly_series("Q.N.I8.W1.S1.S1.LE.L.FA.D.F._Z.EUR._T._X.N", "EU_DIRECT_INVESTMENT_LIABILITIES")
        eu_di = pd.merge_asof(eu_di, eu_dia, on="Date")
        eu_di = pd.merge_asof(eu_di, eu_dil, on="Date")
        return eu_di

    def portfolio_investment(self):
        """
        * Title: Portfolio Investment, Total financial assets/liabilities
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=338.BP6.Q.N.I8.W1.S1.S1.LE.N.FA.P.F._Z.EUR._T.M.N
        * Frequency: Quarterly
        """
        eu_pi = self._quarterly_series("Q.N.I8.W1.S1.S1.LE.N.FA.P.F._Z.EUR._T.M.N", "EU_PORTFOLIO_INVESTMENT")
        eu_pia = self._quarterly_series("Q.N.I8.W1.S1.S1.LE.A.FA.P.F5._Z.EUR._T.M.N", "EU_EQUITY_AND_INVESTMENT_FUND_ASSETS")
        eu_pil = self._quarterly_series("Q.N.I8.W1.S1.S1.LE.L.FA.P.F5._Z.EUR._T.M.N", "EU_EQUITY_AND_INVESTMENT_FUND_LIABILITIES")
        # BUG FIX: each frame now converts its own dates (the original reused
        # eu_pil's dates for both debt-securities frames), and the assets
        # label casing is corrected from "EU_DEBT_SECURITIES_aSSETS".
        eu_pida = self._quarterly_series("Q.N.I8.W1.S1.S1.LE.A.FA.P.F3.T.EUR._T.M.N", "EU_DEBT_SECURITIES_ASSETS")
        eu_pidl = self._quarterly_series("Q.N.I8.W1.S1.S1.LE.L.FA.P.F3.T.EUR._T.M.N", "EU_DEBT_SECURITIES_LIABILITIES")
        for extra in (eu_pia, eu_pil, eu_pida, eu_pidl):
            eu_pi = pd.merge_asof(eu_pi, extra, on="Date")
        return eu_pi

    def other_investment(self):
        """
        * Title: Other Investment, Total financial assets/liabilities
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=338.BP6.Q.N.I8.W1.S1.S1.LE.N.FA.O.F._Z.EUR._T._X.N
        * Frequency: Quarterly
        """
        # BUG FIX: columns were labelled EU_DIRECT_INVESTMENT* and the method
        # returned the undefined name eu_di (NameError at runtime).
        eu_oi = self._quarterly_series("Q.N.I8.W1.S1.S1.LE.N.FA.O.F._Z.EUR._T._X.N", "EU_OTHER_INVESTMENT")
        eu_oia = self._quarterly_series("Q.N.I8.W1.S1.S1.LE.A.FA.O.F._Z.EUR._T._X.N", "EU_OTHER_INVESTMENT_ASSETS")
        eu_oil = self._quarterly_series("Q.N.I8.W1.S1.S1.LE.L.FA.O.F._Z.EUR._T._X.N", "EU_OTHER_INVESTMENT_LIABILITIES")
        eu_oi = pd.merge_asof(eu_oi, eu_oia, on="Date")
        eu_oi = pd.merge_asof(eu_oi, eu_oil, on="Date")
        return eu_oi

    def reserve_assetsc(self):
        """
        * Title: Reserve Assets, Total financial assets/liabilities
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=338.BP6.Q.N.I8.W1.S121.S1.LE.A.FA.R.F._Z.EUR.X1._X.N
        * Frequency: Quarterly
        NOTE(review): method name has a trailing "c"; kept for compatibility.
        """
        # BUG FIX: column was mislabelled EU_OTHER_INVESTMENT_ASSETS.
        return self._quarterly_series("Q.N.I8.W1.S121.S1.LE.A.FA.R.F._Z.EUR.X1._X.N", "EU_RESERVE_ASSETS")

    def gross_external_debt(self):
        """
        * Title: Gross external debt
        * URL: https://sdw.ecb.europa.eu/quickview.do?SERIES_KEY=338.BP6.Q.N.I8.W1.S1.S1.LE.L.FA._T.FGED._Z.EUR._T._X.N
        * Frequency: Quarterly
        """
        return self._quarterly_series("Q.N.I8.W1.S1.S1.LE.L.FA._T.FGED._Z.EUR._T._X.N", "EU_GROSS_EXTERNAL_DEBT")
if __name__ == "__main__":
    # Smoke test: download the monthly CPI series. CPI_monthly is defined
    # earlier in this file (outside this excerpt).
    data, name_list = CPI_monthly()
| 83.997585
| 2,337
| 0.703592
| 28,768
| 173,875
| 4.135393
| 0.036534
| 0.016711
| 0.032143
| 0.040347
| 0.873873
| 0.853581
| 0.833055
| 0.819126
| 0.801315
| 0.779502
| 0
| 0.145143
| 0.12873
| 173,875
| 2,069
| 2,338
| 84.038183
| 0.640159
| 0.13692
| 0
| 0.419762
| 0
| 0.106984
| 0.623224
| 0.402027
| 0
| 0
| 0
| 0
| 0
| 1
| 0.105498
| false
| 0.011144
| 0.022288
| 0.001486
| 0.232541
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
8ca800964c0b72d33b3fef6ca83789794c0ea2b4
| 3,731
|
py
|
Python
|
reveal_graph_embedding/quality/conductance.py
|
MKLab-ITI/reveal-graph-embedding
|
72d4af794536f97b8ede06c0f27f261ea85d8c4b
|
[
"Apache-2.0"
] | 31
|
2015-07-14T16:21:25.000Z
|
2021-06-30T14:10:44.000Z
|
reveal_graph_embedding/quality/conductance.py
|
MKLab-ITI/reveal-graph-embedding
|
72d4af794536f97b8ede06c0f27f261ea85d8c4b
|
[
"Apache-2.0"
] | null | null | null |
reveal_graph_embedding/quality/conductance.py
|
MKLab-ITI/reveal-graph-embedding
|
72d4af794536f97b8ede06c0f27f261ea85d8c4b
|
[
"Apache-2.0"
] | 11
|
2016-08-21T03:07:20.000Z
|
2020-03-07T03:17:05.000Z
|
__author__ = 'Georgios Rizos (georgerizos@iti.gr)'
import numpy as np
def conductance(adjacency_matrix, node_array):
    """Conductance of the cut separating ``node_array`` from the rest of the graph.

    Parameters:
    adjacency_matrix - scipy.sparse adjacency matrix (symmetric expected,
                       since cut edges are assumed to be counted twice).
    node_array - array of node indices forming one side of the cut.

    Returns the number of cut edges divided by the smaller side's internal
    edge count, or ``numpy.inf`` when that count is zero.
    """
    number_of_nodes = adjacency_matrix.shape[0]
    node_array_bar = np.setdiff1d(np.arange(number_of_nodes), node_array)

    submatrix = adjacency_matrix[np.ix_(node_array, node_array)]
    submatrix_bar = adjacency_matrix[np.ix_(node_array_bar, node_array_bar)]

    submatrix_volume = submatrix.getnnz()  # TODO: If empty?
    submatrix_bar_volume = submatrix_bar.getnnz()  # TODO: If empty?
    matrix_volume = adjacency_matrix.getnnz()

    # In a symmetric matrix every cut edge is stored twice, hence the /2.
    cut_volume = (matrix_volume - submatrix_volume - submatrix_bar_volume)/2

    try:
        cut_conductance = cut_volume/min(submatrix_volume, submatrix_bar_volume)
    except ZeroDivisionError:
        # BUG FIX: np.Inf was removed in NumPy 2.0; np.inf works everywhere.
        cut_conductance = np.inf

    return cut_conductance
def conductance_and_clustering_coefficient(adjacency_matrix, node_array, seed_node):
    """Conductance of the cut around ``node_array`` plus an edge-density score.

    Parameters:
    adjacency_matrix - scipy.sparse adjacency matrix (symmetric expected).
    node_array - array of node indices forming one side of the cut.
    seed_node - node excluded from the density computation.

    Returns ``(cut_conductance, clustering_coefficient)`` where the second
    value is nnz(submatrix without seed) / size**2.
    """
    number_of_nodes = adjacency_matrix.shape[0]
    node_array_bar = np.setdiff1d(np.arange(number_of_nodes), node_array)

    submatrix = adjacency_matrix[np.ix_(node_array, node_array)]
    submatrix_bar = adjacency_matrix[np.ix_(node_array_bar, node_array_bar)]

    submatrix_volume = submatrix.getnnz()  # TODO: If empty?
    submatrix_bar_volume = submatrix_bar.getnnz()  # TODO: If empty?
    matrix_volume = adjacency_matrix.getnnz()

    cut_volume = (matrix_volume - submatrix_volume - submatrix_bar_volume)/2

    # BUG FIX: every sibling conductance function guards this division;
    # this one crashed with ZeroDivisionError when one side had no edges.
    try:
        cut_conductance = cut_volume/min(submatrix_volume, submatrix_bar_volume)
    except ZeroDivisionError:
        cut_conductance = np.inf

    new_node_array = np.setdiff1d(node_array, seed_node)
    clustering_coefficient = adjacency_matrix[np.ix_(new_node_array, new_node_array)]
    clustering_coefficient = clustering_coefficient.getnnz()/(new_node_array.size*new_node_array.size)

    return cut_conductance, clustering_coefficient
def fast_conductance(array_of_arrays, node_array, matrix_volume):
    """Conductance computed from adjacency lists instead of matrix slicing.

    Parameters:
    array_of_arrays - sequence where entry i is the numpy array of node i's neighbors.
    node_array - array of node indices forming one side of the cut.
    matrix_volume - total number of stored edges (directed count) in the graph.

    Returns ``(cut_conductance, cut_volume, submatrix_volume)`` so callers
    can feed the volumes into the incremental/decremental updates below.
    """
    submatrix_volume = 0
    cut_volume = 0
    for node in node_array:
        neighbors = array_of_arrays[node]
        degree = neighbors.size
        # Neighbors inside node_array stay internal; the rest cross the cut.
        common = np.intersect1d(node_array, neighbors).size
        submatrix_volume += common
        cut_volume += degree - common

    submatrix_bar_volume = matrix_volume - submatrix_volume - 2*cut_volume

    try:
        cut_conductance = cut_volume/min(submatrix_volume, submatrix_bar_volume)
    except ZeroDivisionError:
        # BUG FIX: np.Inf was removed in NumPy 2.0; np.inf works everywhere.
        cut_conductance = np.inf

    return cut_conductance, cut_volume, submatrix_volume
def incremental_conductance(array_of_arrays, node_array, new_node, cut_volume, submatrix_volume, matrix_volume):
    """Update conductance after adding ``new_node`` to ``node_array``.

    ``cut_volume`` and ``submatrix_volume`` are the values returned by
    fast_conductance for ``node_array`` (before the addition); ``node_array``
    must NOT yet contain ``new_node``.

    Returns the updated ``(cut_conductance, cut_volume, submatrix_volume)``.
    """
    # TODO: What if I have ones in the diagonal?
    neighbors = array_of_arrays[new_node]
    degree = neighbors.size
    common = np.intersect1d(node_array, neighbors).size
    submatrix_volume += common
    cut_volume += degree - common

    submatrix_bar_volume = matrix_volume - submatrix_volume - 2*cut_volume

    try:
        cut_conductance = cut_volume/min(submatrix_volume, submatrix_bar_volume)
    except ZeroDivisionError:
        # BUG FIX: np.Inf was removed in NumPy 2.0; np.inf works everywhere.
        cut_conductance = np.inf

    return cut_conductance, cut_volume, submatrix_volume
def decremental_conductance(array_of_arrays, node_array, new_node, cut_volume, submatrix_volume, matrix_volume):
    """Update conductance after removing ``new_node`` from the cluster.

    Mirror image of ``incremental_conductance``: internal edges of the
    removed node become cut edges and vice versa.

    # TODO: What if I have ones in the diagonal?

    Parameters
    ----------
    array_of_arrays : sequence of 1-D int arrays
        ``array_of_arrays[i]`` holds the neighbor indices of node ``i``.
    node_array : 1-D int array
        Cluster node indices used to count the removed node's internal edges.
    new_node : int
        Node being removed from the cluster.
    cut_volume, submatrix_volume, matrix_volume : int
        Running volumes of the current cut/cluster/whole graph.

    Returns
    -------
    (float, int, int)
        ``(cut_conductance, cut_volume, submatrix_volume)`` after the
        update; the conductance is ``np.inf`` when either side has zero
        volume.
    """
    neighbors = array_of_arrays[new_node]
    degree = neighbors.size
    common = np.intersect1d(node_array, neighbors).size
    # Internal edges of new_node leave the cluster volume and join the cut.
    submatrix_volume -= common
    cut_volume += common - degree
    submatrix_bar_volume = matrix_volume - submatrix_volume - 2*cut_volume
    try:
        cut_conductance = cut_volume/min(submatrix_volume, submatrix_bar_volume)
    except ZeroDivisionError:
        # np.Inf alias was removed in NumPy 2.0; np.inf is the supported name.
        cut_conductance = np.inf
    return cut_conductance, cut_volume, submatrix_volume
| 34.869159
| 112
| 0.755562
| 479
| 3,731
| 5.496868
| 0.127349
| 0.09229
| 0.082036
| 0.069882
| 0.809343
| 0.809343
| 0.79681
| 0.79681
| 0.79681
| 0.79681
| 0
| 0.004847
| 0.170464
| 3,731
| 106
| 113
| 35.198113
| 0.84588
| 0.039936
| 0
| 0.73913
| 0
| 0
| 0.00979
| 0
| 0
| 0
| 0
| 0.009434
| 0
| 1
| 0.072464
| false
| 0
| 0.014493
| 0
| 0.15942
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8ced7434cb879444a7aa7c69a5a7671e68b30702
| 36
|
py
|
Python
|
simpleui/__init__.py
|
skyformat99/simpleui-1
|
84e68ffe27f261ed1fbc889430e61fdd9add7657
|
[
"MIT"
] | null | null | null |
simpleui/__init__.py
|
skyformat99/simpleui-1
|
84e68ffe27f261ed1fbc889430e61fdd9add7657
|
[
"MIT"
] | null | null | null |
simpleui/__init__.py
|
skyformat99/simpleui-1
|
84e68ffe27f261ed1fbc889430e61fdd9add7657
|
[
"MIT"
] | 1
|
2019-08-27T18:05:36.000Z
|
2019-08-27T18:05:36.000Z
|
def get_version():
    """Return the simpleui package version string."""
    return '2.8'
| 12
| 18
| 0.611111
| 6
| 36
| 3.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 0.222222
| 36
| 2
| 19
| 18
| 0.678571
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
0fe72a6430b726017aba6aca0045eab7c5759123
| 142
|
py
|
Python
|
pylogger/utils/idgenerator.py
|
agSant01/pylogger
|
99a5d08b0f486c43dc4936cd89474e21a86f377a
|
[
"MIT"
] | null | null | null |
pylogger/utils/idgenerator.py
|
agSant01/pylogger
|
99a5d08b0f486c43dc4936cd89474e21a86f377a
|
[
"MIT"
] | null | null | null |
pylogger/utils/idgenerator.py
|
agSant01/pylogger
|
99a5d08b0f486c43dc4936cd89474e21a86f377a
|
[
"MIT"
] | null | null | null |
import string
import random
def id_generator(length=6):
    """Return a random ID made of uppercase ASCII letters and digits.

    NOTE(review): this uses ``random``, which is not cryptographically
    secure; switch to the ``secrets`` module if the ID must be unguessable.

    Parameters
    ----------
    length : int, optional
        Number of characters in the generated ID (default 6, matching the
        previous hard-coded behavior).

    Returns
    -------
    str
        A ``length``-character random string.
    """
    alphabet = string.ascii_uppercase + string.digits
    return ''.join(random.choice(alphabet) for _ in range(length))
| 20.285714
| 91
| 0.746479
| 20
| 142
| 5.15
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008197
| 0.140845
| 142
| 6
| 92
| 23.666667
| 0.836066
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
ba0a080b0cf1de1be674bf37d6678cbbececade9
| 2,653
|
py
|
Python
|
mahjonggscoring/rules/test/test_melded_kong.py
|
kajiki/py-mahjongg-scoring
|
b23202de018a8206a4be5594247faa0754e7e54e
|
[
"MIT"
] | null | null | null |
mahjonggscoring/rules/test/test_melded_kong.py
|
kajiki/py-mahjongg-scoring
|
b23202de018a8206a4be5594247faa0754e7e54e
|
[
"MIT"
] | null | null | null |
mahjonggscoring/rules/test/test_melded_kong.py
|
kajiki/py-mahjongg-scoring
|
b23202de018a8206a4be5594247faa0754e7e54e
|
[
"MIT"
] | null | null | null |
import unittest2
from mahjonggscoring.rules import MeldedKong
from mahjonggscoring import Hand
class TestMeldedKongPartial(unittest2.TestCase):
    """Melded kong scores when concealment is given per-set (one concealed)."""

    def setUp(self):
        tiles = [["6/", "6/", "6/", "6/"], ["2/", "3/", "4/"], ["F", "F", "F"], ["2/", "3/", "4/"], ["8/", "8/"]]
        self.rule = MeldedKong(Hand(tiles, {"concealed": [False, True, False, False, False]}))
        self.passed = self.rule.evaluate()

    def test_passed(self):
        self.assertTrue(self.passed)

    def test_points(self):
        self.assertEqual(self.rule.points, 1)
class TestMeldedKongExplicit(unittest2.TestCase):
    """Melded kong scores when the whole hand is explicitly unconcealed."""

    def setUp(self):
        tiles = [["6/", "6/", "6/", "6/"], ["2/", "3/", "4/"], ["F", "F", "F"], ["2/", "3/", "4/"], ["8/", "8/"]]
        self.rule = MeldedKong(Hand(tiles, {"concealed": False}))
        self.passed = self.rule.evaluate()

    def test_passed(self):
        self.assertTrue(self.passed)

    def test_points(self):
        self.assertEqual(self.rule.points, 1)
class TestMeldedKongImplicit(unittest2.TestCase):
    """Melded kong scores when no concealment info is supplied at all."""

    def setUp(self):
        tiles = [["6/", "6/", "6/", "6/"], ["2/", "3/", "4/"], ["F", "F", "F"], ["2/", "3/", "4/"], ["8/", "8/"]]
        self.rule = MeldedKong(Hand(tiles))
        self.passed = self.rule.evaluate()

    def test_passed(self):
        self.assertTrue(self.passed)

    def test_points(self):
        self.assertEqual(self.rule.points, 1)
class TestNotMeldedKong(unittest2.TestCase):
    """Hands that must NOT score a melded kong."""

    def _evaluate(self, tiles, options=None):
        # Build the hand, run the rule, and return whether it passed.
        hand = Hand(tiles, options) if options is not None else Hand(tiles)
        self.examination = MeldedKong(hand)
        self.passed = self.examination.evaluate()
        return self.passed

    def test_not_kong(self):
        tiles = [["6/", "6/", "6/"], ["2/", "3/", "4/"], ["F", "F", "F"], ["2/", "3/", "4/"], ["8/", "8/"]]
        self.assertFalse(self._evaluate(tiles))

    def test_not_melded(self):
        tiles = [["6/", "6/", "6/", "6/"], ["2/", "3/", "4/"], ["F", "F", "F"], ["2/", "3/", "4/"], ["8/", "8/"]]
        self.assertFalse(self._evaluate(tiles, {"concealed": [True, False, False, False, False]}))

    def test_two_melded(self):
        tiles = [["6/", "6/", "6/", "6/"], ["2/", "3/", "4/"], ["F", "F", "F", "F"], ["2/", "3/", "4/"], ["8/", "8/"]]
        self.assertFalse(self._evaluate(tiles, {"concealed": [False, True, False, True, False]}))

    def test_special_hand(self):
        tiles = [["5/", "5/", "3/", "3/", "4/", "4/", "8/", "8/", "6/", "6/", "7/", "7/", "5/", "5/"]]
        self.assertFalse(self._evaluate(tiles))
# Allow running this test module directly with the unittest2 runner.
if __name__ == '__main__':
    unittest2.main()
| 35.851351
| 111
| 0.588767
| 346
| 2,653
| 4.450867
| 0.130058
| 0.023377
| 0.023377
| 0.131818
| 0.82013
| 0.814935
| 0.814935
| 0.814935
| 0.814935
| 0.814935
| 0
| 0.041264
| 0.141349
| 2,653
| 74
| 112
| 35.851351
| 0.634767
| 0
| 0
| 0.666667
| 0
| 0
| 0.087792
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0.216667
| false
| 0.283333
| 0.05
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
ba0b71fea3955a06b0fb30ee870bffb2695154ef
| 5,223
|
py
|
Python
|
picture/tests.py
|
ENAIKA/picture
|
4a5f81a6fc3f56b0322ed753af0e601cc0ec8149
|
[
"Unlicense"
] | null | null | null |
picture/tests.py
|
ENAIKA/picture
|
4a5f81a6fc3f56b0322ed753af0e601cc0ec8149
|
[
"Unlicense"
] | 8
|
2021-03-19T04:10:34.000Z
|
2021-09-22T19:06:00.000Z
|
picture/tests.py
|
ENAIKA/picture
|
4a5f81a6fc3f56b0322ed753af0e601cc0ec8149
|
[
"Unlicense"
] | null | null | null |
from django.test import TestCase
from .models import PhotoImage,Category,Location
import pyperclip
# Create your tests here.
class LocationTestClass(TestCase):
    """Unit tests for the Location model helper methods."""

    def setUp(self):
        # Create and persist a location used by every test.
        self.new_location = Location(location_name="Mozambique")
        self.new_location.save_location()

    def tearDown(self):
        Location.objects.all().delete()

    def test_instance(self):
        self.assertTrue(isinstance(self.new_location, Location))

    def test_save_method(self):
        self.new_location.save_location()
        locations = Location.objects.all()
        self.assertTrue(len(locations) > 0)

    def test_delete_method(self):
        self.new_location.delete_location()
        locations = Location.objects.all()
        self.assertTrue(len(locations) == 0)

    def test_update_location_method(self):
        self.new_location.update_location(name="Mozambique", field="location_name", value="TestMozambique")
        # Fixed: queryset was previously bound to a misleading `photo` name.
        locations = Location.objects.all()
        self.assertTrue(len(locations) == 1)
class CategoryTestClass(TestCase):
    """Unit tests for the Category model helper methods."""

    def setUp(self):
        # Create and persist a category used by every test.
        self.new_category = Category(title="test")
        self.new_category.save_category()

    def tearDown(self):
        Category.objects.all().delete()

    def test_instance(self):
        self.assertTrue(isinstance(self.new_category, Category))

    def test_save_method(self):
        self.new_category.save_category()
        categories = Category.objects.all()
        self.assertTrue(len(categories) > 0)

    def test_delete_method(self):
        self.new_category.delete_category()
        categories = Category.objects.all()
        self.assertTrue(len(categories) == 0)

    def test_update_category_method(self):
        # Renamed from test_update_location_method: this class tests Category.
        self.new_category.update_category(name="test", field="title", value="TestCategory")
        categories = Category.objects.all()
        self.assertTrue(len(categories) == 1)
class PhotoImageTestClass(TestCase):
    """Unit tests for the PhotoImage model helper methods."""

    def setUp(self):
        # Create a location, a category, and a photo linked to both.
        self.new_location = Location(location_name="Mozambique")
        self.new_location.save_location()
        self.new_category = Category(title="test")
        self.new_category.save_category()
        self.photo = PhotoImage(name='test1', description="MozambiqueTest", category=self.new_category)
        self.photo.save_photo()
        self.photo.location.add(self.new_location)

    def tearDown(self):
        Category.objects.all().delete()
        Location.objects.all().delete()
        PhotoImage.objects.all().delete()

    def test_instance(self):
        self.assertTrue(isinstance(self.photo, PhotoImage))

    def test_save_method(self):
        self.photo.save_photo()
        photos = PhotoImage.objects.all()
        self.assertTrue(len(photos) > 0)

    def test_delete_method(self):
        self.photo.delete_photo()
        photos = PhotoImage.objects.all()
        self.assertTrue(len(photos) == 0)

    def test_update_photo_method(self):
        # Renamed from test_update_category_method: it exercises update_photo.
        self.photo.update_photo(name="test1", field="description", value="TestMozambique")
        photos = PhotoImage.objects.all()
        self.assertTrue(len(photos) == 1)

    def test_search_by_category_method(self):
        self.photo.search_by_category(category="test")
        photos = PhotoImage.objects.all()
        self.assertTrue(len(photos) == 1)

    def test_get_image_by_id_method(self):
        self.photo.get_image_by_id(id=1)
        photos = PhotoImage.objects.all()
        self.assertTrue(len(photos) == 1)
class CopyTest(TestCase):
    """Tests that PhotoImage.copy_photo returns the stored image URL."""

    def setUp(self):
        # Fixture: one location, one category, and one photo linked to both.
        self.new_location = Location(location_name="Mozambique")
        self.new_location.save_location()
        self.new_category = Category(title="test")
        self.new_category.save_category()
        self.photo = PhotoImage(name='test1', image="imageurl", description="MozambiqueTest", category=self.new_category)
        self.photo.save_photo()
        self.photo.location.add(self.new_location)

    def tearDown(self):
        for model in (Category, Location, PhotoImage):
            model.objects.all().delete()

    def test_instance(self):
        self.assertTrue(isinstance(self.photo, PhotoImage))

    def test_copy_method(self):
        copied = self.photo.copy_photo("imageurl")
        self.assertTrue(copied == self.photo.image.url)
| 32.64375
| 119
| 0.66054
| 616
| 5,223
| 5.456169
| 0.103896
| 0.052068
| 0.053555
| 0.078548
| 0.80601
| 0.773282
| 0.770902
| 0.75662
| 0.727165
| 0.707528
| 0
| 0.003755
| 0.235114
| 5,223
| 159
| 120
| 32.849057
| 0.837547
| 0.140149
| 0
| 0.65625
| 0
| 0
| 0.042143
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0.25
| false
| 0
| 0.03125
| 0
| 0.322917
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ba0c1b6262ce1cb0869cfc7cde679c3191f17e85
| 5,024
|
py
|
Python
|
rodnet/utils/visualization/demo.py
|
zhengzangw/RODNet
|
eca5f2bd1f3051c2b823d279532ddafa71b009c1
|
[
"MIT"
] | 109
|
2020-11-13T11:58:41.000Z
|
2022-03-29T06:46:09.000Z
|
rodnet/utils/visualization/demo.py
|
yh-luo/RODNet
|
969cad6f08b8957b26bc16f86ac4e835d1294050
|
[
"MIT"
] | 46
|
2021-01-13T08:53:12.000Z
|
2022-03-31T02:51:16.000Z
|
rodnet/utils/visualization/demo.py
|
yh-luo/RODNet
|
969cad6f08b8957b26bc16f86ac4e835d1294050
|
[
"MIT"
] | 43
|
2021-01-07T05:09:36.000Z
|
2022-03-20T11:13:58.000Z
|
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
from rodnet.core.object_class import get_class_name
from .fig_configs import fig, fp, symbols
def visualize_train_img_old(fig_name, input_radar, output_confmap, confmap_gt):
    """Save a 1x3 figure comparing radar input, prediction, and ground truth.

    Parameters
    ----------
    fig_name : str
        Output path passed to ``plt.savefig``.
    input_radar, output_confmap, confmap_gt : array-like
        Images accepted by ``plt.imshow``; all are shown with the same
        ``vmin=0, vmax=1`` color scale.
    """
    fig = plt.figure(figsize=(8, 4))
    # All three panels were identical copy-pasted blocks; render them in a loop.
    for panel, img in enumerate((input_radar, output_confmap, confmap_gt), start=1):
        fig.add_subplot(1, 3, panel)
        plt.imshow(img, vmin=0, vmax=1, origin='lower', aspect='auto')
    plt.savefig(fig_name)
    plt.close(fig)
def visualize_train_img(fig_name, img_path, input_radar, output_confmap, confmap_gt):
    """Save a 2x2 training figure: camera image, radar input, prediction, GT."""
    figure = plt.figure(figsize=(8, 8))
    camera_img = mpimg.imread(img_path)
    figure.add_subplot(2, 2, 1)
    plt.imshow(camera_img.astype(np.uint8))
    figure.add_subplot(2, 2, 2)
    plt.imshow(input_radar, origin='lower', aspect='auto')
    figure.add_subplot(2, 2, 3)
    # Channels-first confmap -> HWC for imshow; negatives clipped to zero.
    pred = np.transpose(output_confmap, (1, 2, 0))
    pred[pred < 0] = 0
    plt.imshow(pred, vmin=0, vmax=1, origin='lower', aspect='auto')
    figure.add_subplot(2, 2, 4)
    gt = np.transpose(confmap_gt, (1, 2, 0))
    plt.imshow(gt, vmin=0, vmax=1, origin='lower', aspect='auto')
    plt.savefig(fig_name)
    plt.close(figure)
def visualize_test_img(fig_name, img_path, input_radar, confmap_pred, confmap_gt, res_final, dataset, viz=False,
                       sybl=False):
    """Render a 2x2 panel (image, RA heatmap, detections, ground truth) and save it."""
    max_dets, _ = res_final.shape
    classes = dataset.object_cfg.classes

    img_data = mpimg.imread(img_path)
    if img_data.shape[0] > 864:
        # Crop the bottom fifth of very tall camera frames.
        img_data = img_data[:img_data.shape[0] // 5 * 4, :, :]
    fig.add_subplot(2, 2, 1)
    plt.imshow(img_data.astype(np.uint8))
    plt.axis('off')
    plt.title("Image")

    fig.add_subplot(2, 2, 2)
    plt.imshow(input_radar, origin='lower', aspect='auto')
    plt.axis('off')
    plt.title("RA Heatmap")

    fig.add_subplot(2, 2, 3)
    # Channels-first -> HWC, clipped into [0, 1] for display.
    confmap_pred = np.transpose(confmap_pred, (1, 2, 0))
    confmap_pred[confmap_pred < 0] = 0
    confmap_pred[confmap_pred > 1] = 1
    plt.imshow(confmap_pred, vmin=0, vmax=1, origin='lower', aspect='auto')
    for det_idx in range(max_dets):
        cla_id = int(res_final[det_idx, 0])
        if cla_id == -1:
            continue  # -1 marks an unused detection slot
        row_id = res_final[det_idx, 1]
        col_id = res_final[det_idx, 2]
        conf = res_final[det_idx, 3]
        conf = 1.0 if conf > 1 else conf
        cla_str = get_class_name(cla_id, classes)
        if sybl:
            plt.text(col_id, row_id + 3, symbols[cla_str], fontproperties=fp, color='white', size=20, ha="center")
        else:
            plt.scatter(col_id, row_id, s=10, c='white')
            plt.text(col_id + 5, row_id, cla_str + '\n%.2f' % conf, color='white', fontsize=10)
    plt.axis('off')
    plt.title("RODNet Detection")

    fig.add_subplot(2, 2, 4)
    confmap_gt = np.transpose(confmap_gt, (1, 2, 0))
    plt.imshow(confmap_gt, vmin=0, vmax=1, origin='lower', aspect='auto')
    plt.axis('off')
    plt.title("Ground Truth")

    plt.savefig(fig_name)
    if viz:
        plt.pause(0.1)
    plt.clf()
def visualize_test_img_wo_gt(fig_name, img_path, input_radar, confmap_pred, res_final, dataset, viz=False,
                             sybl=False):
    """Render a 1x3 panel (image, RF input, detections) without ground truth and save it."""
    max_dets, _ = res_final.shape
    classes = dataset.object_cfg.classes

    fig.set_size_inches(12, 4)
    img_data = mpimg.imread(img_path)
    if img_data.shape[0] > 864:
        # Crop the bottom fifth of very tall camera frames.
        img_data = img_data[:img_data.shape[0] // 5 * 4, :, :]
    fig.add_subplot(1, 3, 1)
    plt.imshow(img_data.astype(np.uint8))
    plt.axis('off')
    plt.title("RGB Image")

    fig.add_subplot(1, 3, 2)
    # Radar input clipped into [0, 1] for display.
    input_radar[input_radar < 0] = 0
    input_radar[input_radar > 1] = 1
    plt.imshow(input_radar, vmin=0, vmax=1, origin='lower', aspect='auto')
    plt.axis('off')
    plt.title("RF Image")

    fig.add_subplot(1, 3, 3)
    # Channels-first -> HWC, clipped into [0, 1] for display.
    confmap_pred = np.transpose(confmap_pred, (1, 2, 0))
    confmap_pred[confmap_pred < 0] = 0
    confmap_pred[confmap_pred > 1] = 1
    plt.imshow(confmap_pred, vmin=0, vmax=1, origin='lower', aspect='auto')
    for det_idx in range(max_dets):
        cla_id = int(res_final[det_idx, 0])
        if cla_id == -1:
            continue  # -1 marks an unused detection slot
        row_id = res_final[det_idx, 1]
        col_id = res_final[det_idx, 2]
        conf = res_final[det_idx, 3]
        conf = 1.0 if conf > 1 else conf
        cla_str = get_class_name(cla_id, classes)
        if sybl:
            plt.text(col_id - 3, row_id + 2, symbols[cla_str], fontproperties=fp, color='white', size=20)
        else:
            plt.scatter(col_id, row_id, s=10, c='white')
            plt.text(col_id + 5, row_id, cla_str + '\n%.2f' % conf, color='white', fontsize=10)
    plt.axis('off')
    plt.title("RODNet Detections")

    plt.savefig(fig_name)
    if viz:
        plt.pause(0.1)
    plt.clf()
| 33.052632
| 112
| 0.621616
| 798
| 5,024
| 3.716792
| 0.145363
| 0.059339
| 0.061362
| 0.077883
| 0.833109
| 0.833109
| 0.794673
| 0.763655
| 0.736682
| 0.712407
| 0
| 0.039532
| 0.234674
| 5,024
| 151
| 113
| 33.271523
| 0.73186
| 0
| 0
| 0.761905
| 0
| 0
| 0.048766
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.031746
| false
| 0
| 0.039683
| 0
| 0.071429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e8ac38cbeb3fcd5576ff820f4b114c8e541ed403
| 128,507
|
py
|
Python
|
librespot/proto/Metadata_pb2.py
|
forslund/librespot-python
|
7a340b1b20889e1afae47aa0f433a0893f4290f1
|
[
"Apache-2.0"
] | 64
|
2021-02-24T06:46:34.000Z
|
2022-03-29T11:33:46.000Z
|
librespot/proto/Metadata_pb2.py
|
forslund/librespot-python
|
7a340b1b20889e1afae47aa0f433a0893f4290f1
|
[
"Apache-2.0"
] | 16
|
2021-04-24T12:25:30.000Z
|
2022-02-19T00:02:44.000Z
|
librespot/proto/Metadata_pb2.py
|
forslund/librespot-python
|
7a340b1b20889e1afae47aa0f433a0893f4290f1
|
[
"Apache-2.0"
] | 22
|
2021-04-05T23:57:14.000Z
|
2022-03-10T04:45:08.000Z
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: metadata.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='metadata.proto',
package='spotify.metadata.proto',
syntax='proto2',
serialized_options=b'\n\024com.spotify.metadataB\010MetadataH\002',
create_key=_descriptor._internal_create_key,
serialized_pb=
b'\n\x0emetadata.proto\x12\x16spotify.metadata.proto\"\x8a\x07\n\x06\x41rtist\x12\x0b\n\x03gid\x18\x01 \x01(\x0c\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x12\n\npopularity\x18\x03 \x01(\x11\x12\x34\n\ttop_track\x18\x04 \x03(\x0b\x32!.spotify.metadata.proto.TopTracks\x12\x37\n\x0b\x61lbum_group\x18\x05 \x03(\x0b\x32\".spotify.metadata.proto.AlbumGroup\x12\x38\n\x0csingle_group\x18\x06 \x03(\x0b\x32\".spotify.metadata.proto.AlbumGroup\x12=\n\x11\x63ompilation_group\x18\x07 \x03(\x0b\x32\".spotify.metadata.proto.AlbumGroup\x12<\n\x10\x61ppears_on_group\x18\x08 \x03(\x0b\x32\".spotify.metadata.proto.AlbumGroup\x12\r\n\x05genre\x18\t \x03(\t\x12\x37\n\x0b\x65xternal_id\x18\n \x03(\x0b\x32\".spotify.metadata.proto.ExternalId\x12/\n\x08portrait\x18\x0b \x03(\x0b\x32\x1d.spotify.metadata.proto.Image\x12\x34\n\tbiography\x18\x0c \x03(\x0b\x32!.spotify.metadata.proto.Biography\x12?\n\x0f\x61\x63tivity_period\x18\r \x03(\x0b\x32&.spotify.metadata.proto.ActivityPeriod\x12\x38\n\x0brestriction\x18\x0e \x03(\x0b\x32#.spotify.metadata.proto.Restriction\x12/\n\x07related\x18\x0f \x03(\x0b\x32\x1e.spotify.metadata.proto.Artist\x12\x1f\n\x17is_portrait_album_cover\x18\x10 \x01(\x08\x12:\n\x0eportrait_group\x18\x11 \x01(\x0b\x32\".spotify.metadata.proto.ImageGroup\x12\x37\n\x0bsale_period\x18\x12 \x03(\x0b\x32\".spotify.metadata.proto.SalePeriod\x12:\n\x0c\x61vailability\x18\x14 \x03(\x0b\x32$.spotify.metadata.proto.Availability\"\xe8\x06\n\x05\x41lbum\x12\x0b\n\x03gid\x18\x01 \x01(\x0c\x12\x0c\n\x04name\x18\x02 \x01(\t\x12.\n\x06\x61rtist\x18\x03 \x03(\x0b\x32\x1e.spotify.metadata.proto.Artist\x12\x30\n\x04type\x18\x04 \x01(\x0e\x32\".spotify.metadata.proto.Album.Type\x12\r\n\x05label\x18\x05 \x01(\t\x12*\n\x04\x64\x61te\x18\x06 \x01(\x0b\x32\x1c.spotify.metadata.proto.Date\x12\x12\n\npopularity\x18\x07 \x01(\x11\x12\r\n\x05genre\x18\x08 \x03(\t\x12,\n\x05\x63over\x18\t \x03(\x0b\x32\x1d.spotify.metadata.proto.Image\x12\x37\n\x0b\x65xternal_id\x18\n 
\x03(\x0b\x32\".spotify.metadata.proto.ExternalId\x12*\n\x04\x64isc\x18\x0b \x03(\x0b\x32\x1c.spotify.metadata.proto.Disc\x12\x0e\n\x06review\x18\x0c \x03(\t\x12\x34\n\tcopyright\x18\r \x03(\x0b\x32!.spotify.metadata.proto.Copyright\x12\x38\n\x0brestriction\x18\x0e \x03(\x0b\x32#.spotify.metadata.proto.Restriction\x12.\n\x07related\x18\x0f \x03(\x0b\x32\x1d.spotify.metadata.proto.Album\x12\x37\n\x0bsale_period\x18\x10 \x03(\x0b\x32\".spotify.metadata.proto.SalePeriod\x12\x37\n\x0b\x63over_group\x18\x11 \x01(\x0b\x32\".spotify.metadata.proto.ImageGroup\x12\x16\n\x0eoriginal_title\x18\x12 \x01(\t\x12\x15\n\rversion_title\x18\x13 \x01(\t\x12\x10\n\x08type_str\x18\x14 \x01(\t\x12:\n\x0c\x61vailability\x18\x17 \x03(\x0b\x32$.spotify.metadata.proto.Availability\"R\n\x04Type\x12\t\n\x05\x41LBUM\x10\x01\x12\n\n\x06SINGLE\x10\x02\x12\x0f\n\x0b\x43OMPILATION\x10\x03\x12\x06\n\x02\x45P\x10\x04\x12\r\n\tAUDIOBOOK\x10\x05\x12\x0b\n\x07PODCAST\x10\x06\"\xd5\x05\n\x05Track\x12\x0b\n\x03gid\x18\x01 \x01(\x0c\x12\x0c\n\x04name\x18\x02 \x01(\t\x12,\n\x05\x61lbum\x18\x03 \x01(\x0b\x32\x1d.spotify.metadata.proto.Album\x12.\n\x06\x61rtist\x18\x04 \x03(\x0b\x32\x1e.spotify.metadata.proto.Artist\x12\x0e\n\x06number\x18\x05 \x01(\x11\x12\x13\n\x0b\x64isc_number\x18\x06 \x01(\x11\x12\x10\n\x08\x64uration\x18\x07 \x01(\x11\x12\x12\n\npopularity\x18\x08 \x01(\x11\x12\x10\n\x08\x65xplicit\x18\t \x01(\x08\x12\x37\n\x0b\x65xternal_id\x18\n \x03(\x0b\x32\".spotify.metadata.proto.ExternalId\x12\x38\n\x0brestriction\x18\x0b \x03(\x0b\x32#.spotify.metadata.proto.Restriction\x12/\n\x04\x66ile\x18\x0c \x03(\x0b\x32!.spotify.metadata.proto.AudioFile\x12\x32\n\x0b\x61lternative\x18\r \x03(\x0b\x32\x1d.spotify.metadata.proto.Track\x12\x37\n\x0bsale_period\x18\x0e \x03(\x0b\x32\".spotify.metadata.proto.SalePeriod\x12\x32\n\x07preview\x18\x0f \x03(\x0b\x32!.spotify.metadata.proto.AudioFile\x12\x0c\n\x04tags\x18\x10 \x03(\t\x12\x1f\n\x17\x65\x61rliest_live_timestamp\x18\x11 
\x01(\x03\x12\x12\n\nhas_lyrics\x18\x12 \x01(\x08\x12:\n\x0c\x61vailability\x18\x13 \x03(\x0b\x32$.spotify.metadata.proto.Availability\x12\x32\n\x08licensor\x18\x15 \x01(\x0b\x32 .spotify.metadata.proto.Licensor\"\xbf\x05\n\x04Show\x12\x0b\n\x03gid\x18\x01 \x01(\x0c\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18@ \x01(\t\x12!\n\x15\x64\x65precated_popularity\x18\x41 \x01(\x11\x42\x02\x18\x01\x12\x11\n\tpublisher\x18\x42 \x01(\t\x12\x10\n\x08language\x18\x43 \x01(\t\x12\x10\n\x08\x65xplicit\x18\x44 \x01(\x08\x12\x37\n\x0b\x63over_image\x18\x45 \x01(\x0b\x32\".spotify.metadata.proto.ImageGroup\x12\x30\n\x07\x65pisode\x18\x46 \x03(\x0b\x32\x1f.spotify.metadata.proto.Episode\x12\x34\n\tcopyright\x18G \x03(\x0b\x32!.spotify.metadata.proto.Copyright\x12\x38\n\x0brestriction\x18H \x03(\x0b\x32#.spotify.metadata.proto.Restriction\x12\x0f\n\x07keyword\x18I \x03(\t\x12:\n\nmedia_type\x18J \x01(\x0e\x32&.spotify.metadata.proto.Show.MediaType\x12H\n\x11\x63onsumption_order\x18K \x01(\x0e\x32-.spotify.metadata.proto.Show.ConsumptionOrder\x12:\n\x0c\x61vailability\x18N \x03(\x0b\x32$.spotify.metadata.proto.Availability\x12\x13\n\x0btrailer_uri\x18S \x01(\t\",\n\tMediaType\x12\t\n\x05MIXED\x10\x00\x12\t\n\x05\x41UDIO\x10\x01\x12\t\n\x05VIDEO\x10\x02\"<\n\x10\x43onsumptionOrder\x12\x0e\n\nSEQUENTIAL\x10\x01\x12\x0c\n\x08\x45PISODIC\x10\x02\x12\n\n\x06RECENT\x10\x03\"\xf9\x06\n\x07\x45pisode\x12\x0b\n\x03gid\x18\x01 \x01(\x0c\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x64uration\x18\x07 \x01(\x11\x12\x30\n\x05\x61udio\x18\x0c \x03(\x0b\x32!.spotify.metadata.proto.AudioFile\x12\x13\n\x0b\x64\x65scription\x18@ \x01(\t\x12\x0e\n\x06number\x18\x41 \x01(\x11\x12\x32\n\x0cpublish_time\x18\x42 \x01(\x0b\x32\x1c.spotify.metadata.proto.Date\x12!\n\x15\x64\x65precated_popularity\x18\x43 \x01(\x11\x42\x02\x18\x01\x12\x37\n\x0b\x63over_image\x18\x44 \x01(\x0b\x32\".spotify.metadata.proto.ImageGroup\x12\x10\n\x08language\x18\x45 
\x01(\t\x12\x10\n\x08\x65xplicit\x18\x46 \x01(\x08\x12*\n\x04show\x18G \x01(\x0b\x32\x1c.spotify.metadata.proto.Show\x12\x30\n\x05video\x18H \x03(\x0b\x32!.spotify.metadata.proto.VideoFile\x12\x38\n\rvideo_preview\x18I \x03(\x0b\x32!.spotify.metadata.proto.VideoFile\x12\x38\n\raudio_preview\x18J \x03(\x0b\x32!.spotify.metadata.proto.AudioFile\x12\x38\n\x0brestriction\x18K \x03(\x0b\x32#.spotify.metadata.proto.Restriction\x12\x38\n\x0c\x66reeze_frame\x18L \x01(\x0b\x32\".spotify.metadata.proto.ImageGroup\x12\x0f\n\x07keyword\x18M \x03(\t\x12!\n\x19\x61llow_background_playback\x18Q \x01(\x08\x12:\n\x0c\x61vailability\x18R \x03(\x0b\x32$.spotify.metadata.proto.Availability\x12\x14\n\x0c\x65xternal_url\x18S \x01(\t\x12\x39\n\x04type\x18W \x01(\x0e\x32+.spotify.metadata.proto.Episode.EpisodeType\"/\n\x0b\x45pisodeType\x12\x08\n\x04\x46ULL\x10\x00\x12\x0b\n\x07TRAILER\x10\x01\x12\t\n\x05\x42ONUS\x10\x02\"\x18\n\x08Licensor\x12\x0c\n\x04uuid\x18\x01 \x01(\x0c\"J\n\tTopTracks\x12\x0f\n\x07\x63ountry\x18\x01 \x01(\t\x12,\n\x05track\x18\x02 \x03(\x0b\x32\x1d.spotify.metadata.proto.Track\"F\n\x0e\x41\x63tivityPeriod\x12\x12\n\nstart_year\x18\x01 \x01(\x11\x12\x10\n\x08\x65nd_year\x18\x02 \x01(\x11\x12\x0e\n\x06\x64\x65\x63\x61\x64\x65\x18\x03 \x01(\x11\":\n\nAlbumGroup\x12,\n\x05\x61lbum\x18\x01 \x03(\x0b\x32\x1d.spotify.metadata.proto.Album\"N\n\x04\x44\x61te\x12\x0c\n\x04year\x18\x01 \x01(\x11\x12\r\n\x05month\x18\x02 \x01(\x11\x12\x0b\n\x03\x64\x61y\x18\x03 \x01(\x11\x12\x0c\n\x04hour\x18\x04 \x01(\x11\x12\x0e\n\x06minute\x18\x05 \x01(\x11\"\xa0\x01\n\x05Image\x12\x0f\n\x07\x66ile_id\x18\x01 \x01(\x0c\x12\x30\n\x04size\x18\x02 \x01(\x0e\x32\".spotify.metadata.proto.Image.Size\x12\r\n\x05width\x18\x03 \x01(\x11\x12\x0e\n\x06height\x18\x04 \x01(\x11\"5\n\x04Size\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05SMALL\x10\x01\x12\t\n\x05LARGE\x10\x02\x12\n\n\x06XLARGE\x10\x03\":\n\nImageGroup\x12,\n\x05image\x18\x01 
\x03(\x0b\x32\x1d.spotify.metadata.proto.Image\"\x86\x01\n\tBiography\x12\x0c\n\x04text\x18\x01 \x01(\t\x12/\n\x08portrait\x18\x02 \x03(\x0b\x32\x1d.spotify.metadata.proto.Image\x12:\n\x0eportrait_group\x18\x03 \x03(\x0b\x32\".spotify.metadata.proto.ImageGroup\"R\n\x04\x44isc\x12\x0e\n\x06number\x18\x01 \x01(\x11\x12\x0c\n\x04name\x18\x02 \x01(\t\x12,\n\x05track\x18\x03 \x03(\x0b\x32\x1d.spotify.metadata.proto.Track\"e\n\tCopyright\x12\x34\n\x04type\x18\x01 \x01(\x0e\x32&.spotify.metadata.proto.Copyright.Type\x12\x0c\n\x04text\x18\x02 \x01(\t\"\x14\n\x04Type\x12\x05\n\x01P\x10\x00\x12\x05\n\x01\x43\x10\x01\"\xdf\x02\n\x0bRestriction\x12@\n\tcatalogue\x18\x01 \x03(\x0e\x32-.spotify.metadata.proto.Restriction.Catalogue\x12\x36\n\x04type\x18\x04 \x01(\x0e\x32(.spotify.metadata.proto.Restriction.Type\x12\x15\n\rcatalogue_str\x18\x05 \x03(\t\x12\x1b\n\x11\x63ountries_allowed\x18\x02 \x01(\tH\x00\x12\x1d\n\x13\x63ountries_forbidden\x18\x03 \x01(\tH\x00\"U\n\tCatalogue\x12\x06\n\x02\x41\x44\x10\x00\x12\x10\n\x0cSUBSCRIPTION\x10\x01\x12\x11\n\rCATALOGUE_ALL\x10\x02\x12\x0b\n\x07SHUFFLE\x10\x03\x12\x0e\n\nCOMMERCIAL\x10\x04\"\x15\n\x04Type\x12\r\n\tSTREAMING\x10\x00\x42\x15\n\x13\x63ountry_restriction\"R\n\x0c\x41vailability\x12\x15\n\rcatalogue_str\x18\x01 \x03(\t\x12+\n\x05start\x18\x02 \x01(\x0b\x32\x1c.spotify.metadata.proto.Date\"\x9e\x01\n\nSalePeriod\x12\x38\n\x0brestriction\x18\x01 \x03(\x0b\x32#.spotify.metadata.proto.Restriction\x12+\n\x05start\x18\x02 \x01(\x0b\x32\x1c.spotify.metadata.proto.Date\x12)\n\x03\x65nd\x18\x03 \x01(\x0b\x32\x1c.spotify.metadata.proto.Date\"&\n\nExternalId\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\n\n\x02id\x18\x02 \x01(\t\"\x89\x02\n\tAudioFile\x12\x0f\n\x07\x66ile_id\x18\x01 \x01(\x0c\x12\x38\n\x06\x66ormat\x18\x02 
\x01(\x0e\x32(.spotify.metadata.proto.AudioFile.Format\"\xb0\x01\n\x06\x46ormat\x12\x11\n\rOGG_VORBIS_96\x10\x00\x12\x12\n\x0eOGG_VORBIS_160\x10\x01\x12\x12\n\x0eOGG_VORBIS_320\x10\x02\x12\x0b\n\x07MP3_256\x10\x03\x12\x0b\n\x07MP3_320\x10\x04\x12\x0b\n\x07MP3_160\x10\x05\x12\n\n\x06MP3_96\x10\x06\x12\x0f\n\x0bMP3_160_ENC\x10\x07\x12\n\n\x06\x41\x41\x43_24\x10\x08\x12\n\n\x06\x41\x41\x43_48\x10\t\x12\x0f\n\x0b\x41\x41\x43_24_NORM\x10\x10\"\x1c\n\tVideoFile\x12\x0f\n\x07\x66ile_id\x18\x01 \x01(\x0c\x42\"\n\x14\x63om.spotify.metadataB\x08MetadataH\x02'
)
_ALBUM_TYPE = _descriptor.EnumDescriptor(
name='Type',
full_name='spotify.metadata.proto.Album.Type',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='ALBUM',
index=0,
number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SINGLE',
index=1,
number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='COMPILATION',
index=2,
number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='EP',
index=3,
number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AUDIOBOOK',
index=4,
number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PODCAST',
index=5,
number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=1742,
serialized_end=1824,
)
_sym_db.RegisterEnumDescriptor(_ALBUM_TYPE)
_SHOW_MEDIATYPE = _descriptor.EnumDescriptor(
name='MediaType',
full_name='spotify.metadata.proto.Show.MediaType',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='MIXED',
index=0,
number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AUDIO',
index=1,
number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='VIDEO',
index=2,
number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=3152,
serialized_end=3196,
)
_sym_db.RegisterEnumDescriptor(_SHOW_MEDIATYPE)
_SHOW_CONSUMPTIONORDER = _descriptor.EnumDescriptor(
name='ConsumptionOrder',
full_name='spotify.metadata.proto.Show.ConsumptionOrder',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='SEQUENTIAL',
index=0,
number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='EPISODIC',
index=1,
number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RECENT',
index=2,
number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=3198,
serialized_end=3258,
)
_sym_db.RegisterEnumDescriptor(_SHOW_CONSUMPTIONORDER)
_EPISODE_EPISODETYPE = _descriptor.EnumDescriptor(
name='EpisodeType',
full_name='spotify.metadata.proto.Episode.EpisodeType',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='FULL',
index=0,
number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TRAILER',
index=1,
number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BONUS',
index=2,
number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=4103,
serialized_end=4150,
)
_sym_db.RegisterEnumDescriptor(_EPISODE_EPISODETYPE)
_IMAGE_SIZE = _descriptor.EnumDescriptor(
name='Size',
full_name='spotify.metadata.proto.Image.Size',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='DEFAULT',
index=0,
number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SMALL',
index=1,
number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LARGE',
index=2,
number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='XLARGE',
index=3,
number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=4574,
serialized_end=4627,
)
_sym_db.RegisterEnumDescriptor(_IMAGE_SIZE)
_COPYRIGHT_TYPE = _descriptor.EnumDescriptor(
name='Type',
full_name='spotify.metadata.proto.Copyright.Type',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='P',
index=0,
number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='C',
index=1,
number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=4991,
serialized_end=5011,
)
_sym_db.RegisterEnumDescriptor(_COPYRIGHT_TYPE)
_RESTRICTION_CATALOGUE = _descriptor.EnumDescriptor(
name='Catalogue',
full_name='spotify.metadata.proto.Restriction.Catalogue',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='AD',
index=0,
number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SUBSCRIPTION',
index=1,
number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CATALOGUE_ALL',
index=2,
number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SHUFFLE',
index=3,
number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='COMMERCIAL',
index=4,
number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=5234,
serialized_end=5319,
)
_sym_db.RegisterEnumDescriptor(_RESTRICTION_CATALOGUE)
_RESTRICTION_TYPE = _descriptor.EnumDescriptor(
name='Type',
full_name='spotify.metadata.proto.Restriction.Type',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='STREAMING',
index=0,
number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=5321,
serialized_end=5342,
)
_sym_db.RegisterEnumDescriptor(_RESTRICTION_TYPE)
_AUDIOFILE_FORMAT = _descriptor.EnumDescriptor(
name='Format',
full_name='spotify.metadata.proto.AudioFile.Format',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='OGG_VORBIS_96',
index=0,
number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='OGG_VORBIS_160',
index=1,
number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='OGG_VORBIS_320',
index=2,
number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='MP3_256',
index=3,
number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='MP3_320',
index=4,
number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='MP3_160',
index=5,
number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='MP3_96',
index=6,
number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='MP3_160_ENC',
index=7,
number=7,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AAC_24',
index=8,
number=8,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AAC_48',
index=9,
number=9,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AAC_24_NORM',
index=10,
number=16,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=5742,
serialized_end=5918,
)
_sym_db.RegisterEnumDescriptor(_AUDIOFILE_FORMAT)
_ARTIST = _descriptor.Descriptor(
name='Artist',
full_name='spotify.metadata.proto.Artist',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='gid',
full_name='spotify.metadata.proto.Artist.gid',
index=0,
number=1,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"",
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name',
full_name='spotify.metadata.proto.Artist.name',
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='popularity',
full_name='spotify.metadata.proto.Artist.popularity',
index=2,
number=3,
type=17,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='top_track',
full_name='spotify.metadata.proto.Artist.top_track',
index=3,
number=4,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='album_group',
full_name='spotify.metadata.proto.Artist.album_group',
index=4,
number=5,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='single_group',
full_name='spotify.metadata.proto.Artist.single_group',
index=5,
number=6,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='compilation_group',
full_name='spotify.metadata.proto.Artist.compilation_group',
index=6,
number=7,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='appears_on_group',
full_name='spotify.metadata.proto.Artist.appears_on_group',
index=7,
number=8,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='genre',
full_name='spotify.metadata.proto.Artist.genre',
index=8,
number=9,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='external_id',
full_name='spotify.metadata.proto.Artist.external_id',
index=9,
number=10,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='portrait',
full_name='spotify.metadata.proto.Artist.portrait',
index=10,
number=11,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='biography',
full_name='spotify.metadata.proto.Artist.biography',
index=11,
number=12,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='activity_period',
full_name='spotify.metadata.proto.Artist.activity_period',
index=12,
number=13,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='restriction',
full_name='spotify.metadata.proto.Artist.restriction',
index=13,
number=14,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='related',
full_name='spotify.metadata.proto.Artist.related',
index=14,
number=15,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_portrait_album_cover',
full_name='spotify.metadata.proto.Artist.is_portrait_album_cover',
index=15,
number=16,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='portrait_group',
full_name='spotify.metadata.proto.Artist.portrait_group',
index=16,
number=17,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sale_period',
full_name='spotify.metadata.proto.Artist.sale_period',
index=17,
number=18,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='availability',
full_name='spotify.metadata.proto.Artist.availability',
index=18,
number=20,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[],
serialized_start=43,
serialized_end=949,
)
_ALBUM = _descriptor.Descriptor(
name='Album',
full_name='spotify.metadata.proto.Album',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='gid',
full_name='spotify.metadata.proto.Album.gid',
index=0,
number=1,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"",
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name',
full_name='spotify.metadata.proto.Album.name',
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='artist',
full_name='spotify.metadata.proto.Album.artist',
index=2,
number=3,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='type',
full_name='spotify.metadata.proto.Album.type',
index=3,
number=4,
type=14,
cpp_type=8,
label=1,
has_default_value=False,
default_value=1,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='label',
full_name='spotify.metadata.proto.Album.label',
index=4,
number=5,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='date',
full_name='spotify.metadata.proto.Album.date',
index=5,
number=6,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='popularity',
full_name='spotify.metadata.proto.Album.popularity',
index=6,
number=7,
type=17,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='genre',
full_name='spotify.metadata.proto.Album.genre',
index=7,
number=8,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cover',
full_name='spotify.metadata.proto.Album.cover',
index=8,
number=9,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='external_id',
full_name='spotify.metadata.proto.Album.external_id',
index=9,
number=10,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='disc',
full_name='spotify.metadata.proto.Album.disc',
index=10,
number=11,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='review',
full_name='spotify.metadata.proto.Album.review',
index=11,
number=12,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='copyright',
full_name='spotify.metadata.proto.Album.copyright',
index=12,
number=13,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='restriction',
full_name='spotify.metadata.proto.Album.restriction',
index=13,
number=14,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='related',
full_name='spotify.metadata.proto.Album.related',
index=14,
number=15,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sale_period',
full_name='spotify.metadata.proto.Album.sale_period',
index=15,
number=16,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cover_group',
full_name='spotify.metadata.proto.Album.cover_group',
index=16,
number=17,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='original_title',
full_name='spotify.metadata.proto.Album.original_title',
index=17,
number=18,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='version_title',
full_name='spotify.metadata.proto.Album.version_title',
index=18,
number=19,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='type_str',
full_name='spotify.metadata.proto.Album.type_str',
index=19,
number=20,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='availability',
full_name='spotify.metadata.proto.Album.availability',
index=20,
number=23,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
],
extensions=[],
nested_types=[],
enum_types=[
_ALBUM_TYPE,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[],
serialized_start=952,
serialized_end=1824,
)
# Auto-generated protobuf Descriptor for the `Track` message
# (spotify.metadata.proto.Track), produced by protoc.
# NOTE(review): do not hand-edit — field numbers, type/cpp_type codes and the
# serialized_start/serialized_end byte offsets mirror the compiled .proto;
# regenerate from the schema instead.
_TRACK = _descriptor.Descriptor(
name='Track',
full_name='spotify.metadata.proto.Track',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='gid',
full_name='spotify.metadata.proto.Track.gid',
index=0,
number=1,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"",
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name',
full_name='spotify.metadata.proto.Track.name',
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='album',
full_name='spotify.metadata.proto.Track.album',
index=2,
number=3,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='artist',
full_name='spotify.metadata.proto.Track.artist',
index=3,
number=4,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='number',
full_name='spotify.metadata.proto.Track.number',
index=4,
number=5,
type=17,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='disc_number',
full_name='spotify.metadata.proto.Track.disc_number',
index=5,
number=6,
type=17,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='duration',
full_name='spotify.metadata.proto.Track.duration',
index=6,
number=7,
type=17,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='popularity',
full_name='spotify.metadata.proto.Track.popularity',
index=7,
number=8,
type=17,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='explicit',
full_name='spotify.metadata.proto.Track.explicit',
index=8,
number=9,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='external_id',
full_name='spotify.metadata.proto.Track.external_id',
index=9,
number=10,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='restriction',
full_name='spotify.metadata.proto.Track.restriction',
index=10,
number=11,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='file',
full_name='spotify.metadata.proto.Track.file',
index=11,
number=12,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='alternative',
full_name='spotify.metadata.proto.Track.alternative',
index=12,
number=13,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sale_period',
full_name='spotify.metadata.proto.Track.sale_period',
index=13,
number=14,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='preview',
full_name='spotify.metadata.proto.Track.preview',
index=14,
number=15,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tags',
full_name='spotify.metadata.proto.Track.tags',
index=15,
number=16,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='earliest_live_timestamp',
full_name='spotify.metadata.proto.Track.earliest_live_timestamp',
index=16,
number=17,
type=3,
cpp_type=2,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='has_lyrics',
full_name='spotify.metadata.proto.Track.has_lyrics',
index=17,
number=18,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='availability',
full_name='spotify.metadata.proto.Track.availability',
index=18,
number=19,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='licensor',
full_name='spotify.metadata.proto.Track.licensor',
index=19,
number=21,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[],
# Byte offsets of this message's definition inside the file's serialized
# FileDescriptorProto (set by protoc; do not adjust by hand).
serialized_start=1827,
serialized_end=2552,
)
# Auto-generated protobuf Descriptor for the `Show` message
# (spotify.metadata.proto.Show), produced by protoc.
# NOTE(review): do not hand-edit — field numbers, type/cpp_type codes and the
# serialized_start/serialized_end byte offsets mirror the compiled .proto;
# regenerate from the schema instead.
_SHOW = _descriptor.Descriptor(
name='Show',
full_name='spotify.metadata.proto.Show',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='gid',
full_name='spotify.metadata.proto.Show.gid',
index=0,
number=1,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"",
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name',
full_name='spotify.metadata.proto.Show.name',
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='description',
full_name='spotify.metadata.proto.Show.description',
index=2,
number=64,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='deprecated_popularity',
full_name='spotify.metadata.proto.Show.deprecated_popularity',
index=3,
number=65,
type=17,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
# Serialized FieldOptions bytes — presumably [deprecated = true], matching
# the field's name; TODO confirm against the source .proto.
serialized_options=b'\030\001',
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='publisher',
full_name='spotify.metadata.proto.Show.publisher',
index=4,
number=66,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='language',
full_name='spotify.metadata.proto.Show.language',
index=5,
number=67,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='explicit',
full_name='spotify.metadata.proto.Show.explicit',
index=6,
number=68,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cover_image',
full_name='spotify.metadata.proto.Show.cover_image',
index=7,
number=69,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='episode',
full_name='spotify.metadata.proto.Show.episode',
index=8,
number=70,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='copyright',
full_name='spotify.metadata.proto.Show.copyright',
index=9,
number=71,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='restriction',
full_name='spotify.metadata.proto.Show.restriction',
index=10,
number=72,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='keyword',
full_name='spotify.metadata.proto.Show.keyword',
index=11,
number=73,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='media_type',
full_name='spotify.metadata.proto.Show.media_type',
index=12,
number=74,
type=14,
cpp_type=8,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='consumption_order',
full_name='spotify.metadata.proto.Show.consumption_order',
index=13,
number=75,
type=14,
cpp_type=8,
label=1,
has_default_value=False,
# Non-zero enum default — presumably an explicit default declared in the
# .proto (proto2 allows this); TODO confirm against the schema.
default_value=1,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='availability',
full_name='spotify.metadata.proto.Show.availability',
index=14,
number=78,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='trailer_uri',
full_name='spotify.metadata.proto.Show.trailer_uri',
index=15,
number=83,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
],
extensions=[],
nested_types=[],
# Enum types nested inside Show (descriptors defined elsewhere in this file).
enum_types=[
_SHOW_MEDIATYPE,
_SHOW_CONSUMPTIONORDER,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[],
# Byte offsets of this message's definition inside the file's serialized
# FileDescriptorProto (set by protoc; do not adjust by hand).
serialized_start=2555,
serialized_end=3258,
)
# Auto-generated protobuf Descriptor for the `Episode` message
# (spotify.metadata.proto.Episode), produced by protoc.
# NOTE(review): do not hand-edit — field numbers, type/cpp_type codes and the
# serialized_start/serialized_end byte offsets mirror the compiled .proto;
# regenerate from the schema instead.
_EPISODE = _descriptor.Descriptor(
name='Episode',
full_name='spotify.metadata.proto.Episode',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='gid',
full_name='spotify.metadata.proto.Episode.gid',
index=0,
number=1,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"",
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name',
full_name='spotify.metadata.proto.Episode.name',
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='duration',
full_name='spotify.metadata.proto.Episode.duration',
index=2,
number=7,
type=17,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='audio',
full_name='spotify.metadata.proto.Episode.audio',
index=3,
number=12,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='description',
full_name='spotify.metadata.proto.Episode.description',
index=4,
number=64,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='number',
full_name='spotify.metadata.proto.Episode.number',
index=5,
number=65,
type=17,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='publish_time',
full_name='spotify.metadata.proto.Episode.publish_time',
index=6,
number=66,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='deprecated_popularity',
full_name='spotify.metadata.proto.Episode.deprecated_popularity',
index=7,
number=67,
type=17,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
# Serialized FieldOptions bytes — presumably [deprecated = true], matching
# the field's name; TODO confirm against the source .proto.
serialized_options=b'\030\001',
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cover_image',
full_name='spotify.metadata.proto.Episode.cover_image',
index=8,
number=68,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='language',
full_name='spotify.metadata.proto.Episode.language',
index=9,
number=69,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='explicit',
full_name='spotify.metadata.proto.Episode.explicit',
index=10,
number=70,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='show',
full_name='spotify.metadata.proto.Episode.show',
index=11,
number=71,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='video',
full_name='spotify.metadata.proto.Episode.video',
index=12,
number=72,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='video_preview',
full_name='spotify.metadata.proto.Episode.video_preview',
index=13,
number=73,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='audio_preview',
full_name='spotify.metadata.proto.Episode.audio_preview',
index=14,
number=74,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='restriction',
full_name='spotify.metadata.proto.Episode.restriction',
index=15,
number=75,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='freeze_frame',
full_name='spotify.metadata.proto.Episode.freeze_frame',
index=16,
number=76,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='keyword',
full_name='spotify.metadata.proto.Episode.keyword',
index=17,
number=77,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='allow_background_playback',
full_name=
'spotify.metadata.proto.Episode.allow_background_playback',
index=18,
number=81,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='availability',
full_name='spotify.metadata.proto.Episode.availability',
index=19,
number=82,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='external_url',
full_name='spotify.metadata.proto.Episode.external_url',
index=20,
number=83,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='type',
full_name='spotify.metadata.proto.Episode.type',
index=21,
number=87,
type=14,
cpp_type=8,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
],
extensions=[],
nested_types=[],
# Enum type nested inside Episode (descriptor defined elsewhere in this file).
enum_types=[
_EPISODE_EPISODETYPE,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[],
# Byte offsets of this message's definition inside the file's serialized
# FileDescriptorProto (set by protoc; do not adjust by hand).
serialized_start=3261,
serialized_end=4150,
)
# Auto-generated protobuf Descriptor for the `Licensor` message
# (spotify.metadata.proto.Licensor): a single bytes field `uuid`.
# Machine-generated by protoc; do not hand-edit.
_LICENSOR = _descriptor.Descriptor(
name='Licensor',
full_name='spotify.metadata.proto.Licensor',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='uuid',
full_name='spotify.metadata.proto.Licensor.uuid',
index=0,
number=1,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"",
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[],
serialized_start=4152,
serialized_end=4176,
)
# Auto-generated protobuf Descriptor for the `TopTracks` message
# (spotify.metadata.proto.TopTracks): a `country` string plus a repeated
# `track` message field. Machine-generated by protoc; do not hand-edit.
_TOPTRACKS = _descriptor.Descriptor(
name='TopTracks',
full_name='spotify.metadata.proto.TopTracks',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='country',
full_name='spotify.metadata.proto.TopTracks.country',
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='track',
full_name='spotify.metadata.proto.TopTracks.track',
index=1,
number=2,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[],
serialized_start=4178,
serialized_end=4252,
)
# Auto-generated protobuf Descriptor for the `ActivityPeriod` message
# (spotify.metadata.proto.ActivityPeriod): integer fields start_year,
# end_year and decade. Machine-generated by protoc; do not hand-edit.
_ACTIVITYPERIOD = _descriptor.Descriptor(
name='ActivityPeriod',
full_name='spotify.metadata.proto.ActivityPeriod',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='start_year',
full_name='spotify.metadata.proto.ActivityPeriod.start_year',
index=0,
number=1,
type=17,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='end_year',
full_name='spotify.metadata.proto.ActivityPeriod.end_year',
index=1,
number=2,
type=17,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='decade',
full_name='spotify.metadata.proto.ActivityPeriod.decade',
index=2,
number=3,
type=17,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[],
serialized_start=4254,
serialized_end=4324,
)
# Auto-generated protobuf Descriptor for the `AlbumGroup` message
# (spotify.metadata.proto.AlbumGroup): a single repeated `album` message
# field. Machine-generated by protoc; do not hand-edit.
_ALBUMGROUP = _descriptor.Descriptor(
name='AlbumGroup',
full_name='spotify.metadata.proto.AlbumGroup',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='album',
full_name='spotify.metadata.proto.AlbumGroup.album',
index=0,
number=1,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[],
serialized_start=4326,
serialized_end=4384,
)
# Auto-generated protobuf Descriptor for the `Date` message
# (spotify.metadata.proto.Date): integer fields year, month, day, hour,
# minute. Machine-generated by protoc; do not hand-edit.
_DATE = _descriptor.Descriptor(
name='Date',
full_name='spotify.metadata.proto.Date',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='year',
full_name='spotify.metadata.proto.Date.year',
index=0,
number=1,
type=17,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='month',
full_name='spotify.metadata.proto.Date.month',
index=1,
number=2,
type=17,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='day',
full_name='spotify.metadata.proto.Date.day',
index=2,
number=3,
type=17,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='hour',
full_name='spotify.metadata.proto.Date.hour',
index=3,
number=4,
type=17,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='minute',
full_name='spotify.metadata.proto.Date.minute',
index=4,
number=5,
type=17,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[],
serialized_start=4386,
serialized_end=4464,
)
_IMAGE = _descriptor.Descriptor(
name='Image',
full_name='spotify.metadata.proto.Image',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='file_id',
full_name='spotify.metadata.proto.Image.file_id',
index=0,
number=1,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"",
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='size',
full_name='spotify.metadata.proto.Image.size',
index=1,
number=2,
type=14,
cpp_type=8,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='width',
full_name='spotify.metadata.proto.Image.width',
index=2,
number=3,
type=17,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='height',
full_name='spotify.metadata.proto.Image.height',
index=3,
number=4,
type=17,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
],
extensions=[],
nested_types=[],
enum_types=[
_IMAGE_SIZE,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[],
serialized_start=4467,
serialized_end=4627,
)
_IMAGEGROUP = _descriptor.Descriptor(
name='ImageGroup',
full_name='spotify.metadata.proto.ImageGroup',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='image',
full_name='spotify.metadata.proto.ImageGroup.image',
index=0,
number=1,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[],
serialized_start=4629,
serialized_end=4687,
)
_BIOGRAPHY = _descriptor.Descriptor(
name='Biography',
full_name='spotify.metadata.proto.Biography',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='text',
full_name='spotify.metadata.proto.Biography.text',
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='portrait',
full_name='spotify.metadata.proto.Biography.portrait',
index=1,
number=2,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='portrait_group',
full_name='spotify.metadata.proto.Biography.portrait_group',
index=2,
number=3,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[],
serialized_start=4690,
serialized_end=4824,
)
_DISC = _descriptor.Descriptor(
name='Disc',
full_name='spotify.metadata.proto.Disc',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='number',
full_name='spotify.metadata.proto.Disc.number',
index=0,
number=1,
type=17,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name',
full_name='spotify.metadata.proto.Disc.name',
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='track',
full_name='spotify.metadata.proto.Disc.track',
index=2,
number=3,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[],
serialized_start=4826,
serialized_end=4908,
)
_COPYRIGHT = _descriptor.Descriptor(
name='Copyright',
full_name='spotify.metadata.proto.Copyright',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='type',
full_name='spotify.metadata.proto.Copyright.type',
index=0,
number=1,
type=14,
cpp_type=8,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='text',
full_name='spotify.metadata.proto.Copyright.text',
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
],
extensions=[],
nested_types=[],
enum_types=[
_COPYRIGHT_TYPE,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[],
serialized_start=4910,
serialized_end=5011,
)
_RESTRICTION = _descriptor.Descriptor(
name='Restriction',
full_name='spotify.metadata.proto.Restriction',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='catalogue',
full_name='spotify.metadata.proto.Restriction.catalogue',
index=0,
number=1,
type=14,
cpp_type=8,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='type',
full_name='spotify.metadata.proto.Restriction.type',
index=1,
number=4,
type=14,
cpp_type=8,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='catalogue_str',
full_name='spotify.metadata.proto.Restriction.catalogue_str',
index=2,
number=5,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='countries_allowed',
full_name='spotify.metadata.proto.Restriction.countries_allowed',
index=3,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='countries_forbidden',
full_name='spotify.metadata.proto.Restriction.countries_forbidden',
index=4,
number=3,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
],
extensions=[],
nested_types=[],
enum_types=[
_RESTRICTION_CATALOGUE,
_RESTRICTION_TYPE,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='country_restriction',
full_name='spotify.metadata.proto.Restriction.country_restriction',
index=0,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=5014,
serialized_end=5365,
)
_AVAILABILITY = _descriptor.Descriptor(
name='Availability',
full_name='spotify.metadata.proto.Availability',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='catalogue_str',
full_name='spotify.metadata.proto.Availability.catalogue_str',
index=0,
number=1,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='start',
full_name='spotify.metadata.proto.Availability.start',
index=1,
number=2,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[],
serialized_start=5367,
serialized_end=5449,
)
_SALEPERIOD = _descriptor.Descriptor(
name='SalePeriod',
full_name='spotify.metadata.proto.SalePeriod',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='restriction',
full_name='spotify.metadata.proto.SalePeriod.restriction',
index=0,
number=1,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='start',
full_name='spotify.metadata.proto.SalePeriod.start',
index=1,
number=2,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='end',
full_name='spotify.metadata.proto.SalePeriod.end',
index=2,
number=3,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[],
serialized_start=5452,
serialized_end=5610,
)
_EXTERNALID = _descriptor.Descriptor(
name='ExternalId',
full_name='spotify.metadata.proto.ExternalId',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='type',
full_name='spotify.metadata.proto.ExternalId.type',
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='id',
full_name='spotify.metadata.proto.ExternalId.id',
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[],
serialized_start=5612,
serialized_end=5650,
)
_AUDIOFILE = _descriptor.Descriptor(
name='AudioFile',
full_name='spotify.metadata.proto.AudioFile',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='file_id',
full_name='spotify.metadata.proto.AudioFile.file_id',
index=0,
number=1,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"",
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='format',
full_name='spotify.metadata.proto.AudioFile.format',
index=1,
number=2,
type=14,
cpp_type=8,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
],
extensions=[],
nested_types=[],
enum_types=[
_AUDIOFILE_FORMAT,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[],
serialized_start=5653,
serialized_end=5918,
)
_VIDEOFILE = _descriptor.Descriptor(
name='VideoFile',
full_name='spotify.metadata.proto.VideoFile',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='file_id',
full_name='spotify.metadata.proto.VideoFile.file_id',
index=0,
number=1,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"",
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[],
serialized_start=5920,
serialized_end=5948,
)
_ARTIST.fields_by_name['top_track'].message_type = _TOPTRACKS
_ARTIST.fields_by_name['album_group'].message_type = _ALBUMGROUP
_ARTIST.fields_by_name['single_group'].message_type = _ALBUMGROUP
_ARTIST.fields_by_name['compilation_group'].message_type = _ALBUMGROUP
_ARTIST.fields_by_name['appears_on_group'].message_type = _ALBUMGROUP
_ARTIST.fields_by_name['external_id'].message_type = _EXTERNALID
_ARTIST.fields_by_name['portrait'].message_type = _IMAGE
_ARTIST.fields_by_name['biography'].message_type = _BIOGRAPHY
_ARTIST.fields_by_name['activity_period'].message_type = _ACTIVITYPERIOD
_ARTIST.fields_by_name['restriction'].message_type = _RESTRICTION
_ARTIST.fields_by_name['related'].message_type = _ARTIST
_ARTIST.fields_by_name['portrait_group'].message_type = _IMAGEGROUP
_ARTIST.fields_by_name['sale_period'].message_type = _SALEPERIOD
_ARTIST.fields_by_name['availability'].message_type = _AVAILABILITY
_ALBUM.fields_by_name['artist'].message_type = _ARTIST
_ALBUM.fields_by_name['type'].enum_type = _ALBUM_TYPE
_ALBUM.fields_by_name['date'].message_type = _DATE
_ALBUM.fields_by_name['cover'].message_type = _IMAGE
_ALBUM.fields_by_name['external_id'].message_type = _EXTERNALID
_ALBUM.fields_by_name['disc'].message_type = _DISC
_ALBUM.fields_by_name['copyright'].message_type = _COPYRIGHT
_ALBUM.fields_by_name['restriction'].message_type = _RESTRICTION
_ALBUM.fields_by_name['related'].message_type = _ALBUM
_ALBUM.fields_by_name['sale_period'].message_type = _SALEPERIOD
_ALBUM.fields_by_name['cover_group'].message_type = _IMAGEGROUP
_ALBUM.fields_by_name['availability'].message_type = _AVAILABILITY
_ALBUM_TYPE.containing_type = _ALBUM
_TRACK.fields_by_name['album'].message_type = _ALBUM
_TRACK.fields_by_name['artist'].message_type = _ARTIST
_TRACK.fields_by_name['external_id'].message_type = _EXTERNALID
_TRACK.fields_by_name['restriction'].message_type = _RESTRICTION
_TRACK.fields_by_name['file'].message_type = _AUDIOFILE
_TRACK.fields_by_name['alternative'].message_type = _TRACK
_TRACK.fields_by_name['sale_period'].message_type = _SALEPERIOD
_TRACK.fields_by_name['preview'].message_type = _AUDIOFILE
_TRACK.fields_by_name['availability'].message_type = _AVAILABILITY
_TRACK.fields_by_name['licensor'].message_type = _LICENSOR
_SHOW.fields_by_name['cover_image'].message_type = _IMAGEGROUP
_SHOW.fields_by_name['episode'].message_type = _EPISODE
_SHOW.fields_by_name['copyright'].message_type = _COPYRIGHT
_SHOW.fields_by_name['restriction'].message_type = _RESTRICTION
_SHOW.fields_by_name['media_type'].enum_type = _SHOW_MEDIATYPE
_SHOW.fields_by_name['consumption_order'].enum_type = _SHOW_CONSUMPTIONORDER
_SHOW.fields_by_name['availability'].message_type = _AVAILABILITY
_SHOW_MEDIATYPE.containing_type = _SHOW
_SHOW_CONSUMPTIONORDER.containing_type = _SHOW
_EPISODE.fields_by_name['audio'].message_type = _AUDIOFILE
_EPISODE.fields_by_name['publish_time'].message_type = _DATE
_EPISODE.fields_by_name['cover_image'].message_type = _IMAGEGROUP
_EPISODE.fields_by_name['show'].message_type = _SHOW
_EPISODE.fields_by_name['video'].message_type = _VIDEOFILE
_EPISODE.fields_by_name['video_preview'].message_type = _VIDEOFILE
_EPISODE.fields_by_name['audio_preview'].message_type = _AUDIOFILE
_EPISODE.fields_by_name['restriction'].message_type = _RESTRICTION
_EPISODE.fields_by_name['freeze_frame'].message_type = _IMAGEGROUP
_EPISODE.fields_by_name['availability'].message_type = _AVAILABILITY
_EPISODE.fields_by_name['type'].enum_type = _EPISODE_EPISODETYPE
_EPISODE_EPISODETYPE.containing_type = _EPISODE
_TOPTRACKS.fields_by_name['track'].message_type = _TRACK
_ALBUMGROUP.fields_by_name['album'].message_type = _ALBUM
_IMAGE.fields_by_name['size'].enum_type = _IMAGE_SIZE
_IMAGE_SIZE.containing_type = _IMAGE
_IMAGEGROUP.fields_by_name['image'].message_type = _IMAGE
_BIOGRAPHY.fields_by_name['portrait'].message_type = _IMAGE
_BIOGRAPHY.fields_by_name['portrait_group'].message_type = _IMAGEGROUP
_DISC.fields_by_name['track'].message_type = _TRACK
_COPYRIGHT.fields_by_name['type'].enum_type = _COPYRIGHT_TYPE
_COPYRIGHT_TYPE.containing_type = _COPYRIGHT
_RESTRICTION.fields_by_name['catalogue'].enum_type = _RESTRICTION_CATALOGUE
_RESTRICTION.fields_by_name['type'].enum_type = _RESTRICTION_TYPE
_RESTRICTION_CATALOGUE.containing_type = _RESTRICTION
_RESTRICTION_TYPE.containing_type = _RESTRICTION
_RESTRICTION.oneofs_by_name['country_restriction'].fields.append(
_RESTRICTION.fields_by_name['countries_allowed'])
_RESTRICTION.fields_by_name[
'countries_allowed'].containing_oneof = _RESTRICTION.oneofs_by_name[
'country_restriction']
_RESTRICTION.oneofs_by_name['country_restriction'].fields.append(
_RESTRICTION.fields_by_name['countries_forbidden'])
_RESTRICTION.fields_by_name[
'countries_forbidden'].containing_oneof = _RESTRICTION.oneofs_by_name[
'country_restriction']
_AVAILABILITY.fields_by_name['start'].message_type = _DATE
_SALEPERIOD.fields_by_name['restriction'].message_type = _RESTRICTION
_SALEPERIOD.fields_by_name['start'].message_type = _DATE
_SALEPERIOD.fields_by_name['end'].message_type = _DATE
_AUDIOFILE.fields_by_name['format'].enum_type = _AUDIOFILE_FORMAT
_AUDIOFILE_FORMAT.containing_type = _AUDIOFILE
DESCRIPTOR.message_types_by_name['Artist'] = _ARTIST
DESCRIPTOR.message_types_by_name['Album'] = _ALBUM
DESCRIPTOR.message_types_by_name['Track'] = _TRACK
DESCRIPTOR.message_types_by_name['Show'] = _SHOW
DESCRIPTOR.message_types_by_name['Episode'] = _EPISODE
DESCRIPTOR.message_types_by_name['Licensor'] = _LICENSOR
DESCRIPTOR.message_types_by_name['TopTracks'] = _TOPTRACKS
DESCRIPTOR.message_types_by_name['ActivityPeriod'] = _ACTIVITYPERIOD
DESCRIPTOR.message_types_by_name['AlbumGroup'] = _ALBUMGROUP
DESCRIPTOR.message_types_by_name['Date'] = _DATE
DESCRIPTOR.message_types_by_name['Image'] = _IMAGE
DESCRIPTOR.message_types_by_name['ImageGroup'] = _IMAGEGROUP
DESCRIPTOR.message_types_by_name['Biography'] = _BIOGRAPHY
DESCRIPTOR.message_types_by_name['Disc'] = _DISC
DESCRIPTOR.message_types_by_name['Copyright'] = _COPYRIGHT
DESCRIPTOR.message_types_by_name['Restriction'] = _RESTRICTION
DESCRIPTOR.message_types_by_name['Availability'] = _AVAILABILITY
DESCRIPTOR.message_types_by_name['SalePeriod'] = _SALEPERIOD
DESCRIPTOR.message_types_by_name['ExternalId'] = _EXTERNALID
DESCRIPTOR.message_types_by_name['AudioFile'] = _AUDIOFILE
DESCRIPTOR.message_types_by_name['VideoFile'] = _VIDEOFILE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Artist = _reflection.GeneratedProtocolMessageType(
'Artist',
(_message.Message, ),
{
'DESCRIPTOR': _ARTIST,
'__module__': 'metadata_pb2'
# @@protoc_insertion_point(class_scope:spotify.metadata.proto.Artist)
})
_sym_db.RegisterMessage(Artist)
Album = _reflection.GeneratedProtocolMessageType(
'Album',
(_message.Message, ),
{
'DESCRIPTOR': _ALBUM,
'__module__': 'metadata_pb2'
# @@protoc_insertion_point(class_scope:spotify.metadata.proto.Album)
})
_sym_db.RegisterMessage(Album)
Track = _reflection.GeneratedProtocolMessageType(
'Track',
(_message.Message, ),
{
'DESCRIPTOR': _TRACK,
'__module__': 'metadata_pb2'
# @@protoc_insertion_point(class_scope:spotify.metadata.proto.Track)
})
_sym_db.RegisterMessage(Track)
Show = _reflection.GeneratedProtocolMessageType(
'Show',
(_message.Message, ),
{
'DESCRIPTOR': _SHOW,
'__module__': 'metadata_pb2'
# @@protoc_insertion_point(class_scope:spotify.metadata.proto.Show)
})
_sym_db.RegisterMessage(Show)
Episode = _reflection.GeneratedProtocolMessageType(
'Episode',
(_message.Message, ),
{
'DESCRIPTOR': _EPISODE,
'__module__': 'metadata_pb2'
# @@protoc_insertion_point(class_scope:spotify.metadata.proto.Episode)
})
_sym_db.RegisterMessage(Episode)
Licensor = _reflection.GeneratedProtocolMessageType(
'Licensor',
(_message.Message, ),
{
'DESCRIPTOR': _LICENSOR,
'__module__': 'metadata_pb2'
# @@protoc_insertion_point(class_scope:spotify.metadata.proto.Licensor)
})
_sym_db.RegisterMessage(Licensor)
TopTracks = _reflection.GeneratedProtocolMessageType(
'TopTracks',
(_message.Message, ),
{
'DESCRIPTOR': _TOPTRACKS,
'__module__': 'metadata_pb2'
# @@protoc_insertion_point(class_scope:spotify.metadata.proto.TopTracks)
})
_sym_db.RegisterMessage(TopTracks)
ActivityPeriod = _reflection.GeneratedProtocolMessageType(
'ActivityPeriod',
(_message.Message, ),
{
'DESCRIPTOR': _ACTIVITYPERIOD,
'__module__': 'metadata_pb2'
# @@protoc_insertion_point(class_scope:spotify.metadata.proto.ActivityPeriod)
})
_sym_db.RegisterMessage(ActivityPeriod)
AlbumGroup = _reflection.GeneratedProtocolMessageType(
'AlbumGroup',
(_message.Message, ),
{
'DESCRIPTOR': _ALBUMGROUP,
'__module__': 'metadata_pb2'
# @@protoc_insertion_point(class_scope:spotify.metadata.proto.AlbumGroup)
})
_sym_db.RegisterMessage(AlbumGroup)
Date = _reflection.GeneratedProtocolMessageType(
'Date',
(_message.Message, ),
{
'DESCRIPTOR': _DATE,
'__module__': 'metadata_pb2'
# @@protoc_insertion_point(class_scope:spotify.metadata.proto.Date)
})
_sym_db.RegisterMessage(Date)
Image = _reflection.GeneratedProtocolMessageType(
'Image',
(_message.Message, ),
{
'DESCRIPTOR': _IMAGE,
'__module__': 'metadata_pb2'
# @@protoc_insertion_point(class_scope:spotify.metadata.proto.Image)
})
_sym_db.RegisterMessage(Image)
ImageGroup = _reflection.GeneratedProtocolMessageType(
'ImageGroup',
(_message.Message, ),
{
'DESCRIPTOR': _IMAGEGROUP,
'__module__': 'metadata_pb2'
# @@protoc_insertion_point(class_scope:spotify.metadata.proto.ImageGroup)
})
_sym_db.RegisterMessage(ImageGroup)
Biography = _reflection.GeneratedProtocolMessageType(
'Biography',
(_message.Message, ),
{
'DESCRIPTOR': _BIOGRAPHY,
'__module__': 'metadata_pb2'
# @@protoc_insertion_point(class_scope:spotify.metadata.proto.Biography)
})
_sym_db.RegisterMessage(Biography)
Disc = _reflection.GeneratedProtocolMessageType(
'Disc',
(_message.Message, ),
{
'DESCRIPTOR': _DISC,
'__module__': 'metadata_pb2'
# @@protoc_insertion_point(class_scope:spotify.metadata.proto.Disc)
})
_sym_db.RegisterMessage(Disc)
Copyright = _reflection.GeneratedProtocolMessageType(
'Copyright',
(_message.Message, ),
{
'DESCRIPTOR': _COPYRIGHT,
'__module__': 'metadata_pb2'
# @@protoc_insertion_point(class_scope:spotify.metadata.proto.Copyright)
})
_sym_db.RegisterMessage(Copyright)
Restriction = _reflection.GeneratedProtocolMessageType(
'Restriction',
(_message.Message, ),
{
'DESCRIPTOR': _RESTRICTION,
'__module__': 'metadata_pb2'
# @@protoc_insertion_point(class_scope:spotify.metadata.proto.Restriction)
})
_sym_db.RegisterMessage(Restriction)
Availability = _reflection.GeneratedProtocolMessageType(
'Availability',
(_message.Message, ),
{
'DESCRIPTOR': _AVAILABILITY,
'__module__': 'metadata_pb2'
# @@protoc_insertion_point(class_scope:spotify.metadata.proto.Availability)
})
_sym_db.RegisterMessage(Availability)
SalePeriod = _reflection.GeneratedProtocolMessageType(
'SalePeriod',
(_message.Message, ),
{
'DESCRIPTOR': _SALEPERIOD,
'__module__': 'metadata_pb2'
# @@protoc_insertion_point(class_scope:spotify.metadata.proto.SalePeriod)
})
_sym_db.RegisterMessage(SalePeriod)
ExternalId = _reflection.GeneratedProtocolMessageType(
'ExternalId',
(_message.Message, ),
{
'DESCRIPTOR': _EXTERNALID,
'__module__': 'metadata_pb2'
# @@protoc_insertion_point(class_scope:spotify.metadata.proto.ExternalId)
})
_sym_db.RegisterMessage(ExternalId)
AudioFile = _reflection.GeneratedProtocolMessageType(
'AudioFile',
(_message.Message, ),
{
'DESCRIPTOR': _AUDIOFILE,
'__module__': 'metadata_pb2'
# @@protoc_insertion_point(class_scope:spotify.metadata.proto.AudioFile)
})
_sym_db.RegisterMessage(AudioFile)
VideoFile = _reflection.GeneratedProtocolMessageType(
'VideoFile',
(_message.Message, ),
{
'DESCRIPTOR': _VIDEOFILE,
'__module__': 'metadata_pb2'
# @@protoc_insertion_point(class_scope:spotify.metadata.proto.VideoFile)
})
_sym_db.RegisterMessage(VideoFile)
DESCRIPTOR._options = None
_SHOW.fields_by_name['deprecated_popularity']._options = None
_EPISODE.fields_by_name['deprecated_popularity']._options = None
# @@protoc_insertion_point(module_scope)
| 34.741011
| 10,162
| 0.593345
| 13,243
| 128,507
| 5.444159
| 0.038813
| 0.053594
| 0.093291
| 0.077895
| 0.835472
| 0.810131
| 0.769214
| 0.726105
| 0.699252
| 0.683759
| 0
| 0.042617
| 0.307968
| 128,507
| 3,698
| 10,163
| 34.750406
| 0.76809
| 0.013065
| 0
| 0.817754
| 1
| 0.000277
| 0.161431
| 0.131352
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.001106
| 0
| 0.001106
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e8c10c4f512a8859ad0d6347eb53b0188b0c7e0b
| 123
|
py
|
Python
|
src/omlt/neuralnet/layers/__init__.py
|
fracek/OMLT
|
b0ccafda34d1ea2b1187186081ed50f17c10ba7f
|
[
"BSD-3-Clause"
] | 115
|
2021-11-04T03:15:35.000Z
|
2022-03-28T19:05:55.000Z
|
src/omlt/neuralnet/layers/__init__.py
|
fracek/OMLT
|
b0ccafda34d1ea2b1187186081ed50f17c10ba7f
|
[
"BSD-3-Clause"
] | 56
|
2021-11-03T13:59:41.000Z
|
2022-03-21T14:01:52.000Z
|
src/omlt/neuralnet/layers/__init__.py
|
fracek/OMLT
|
b0ccafda34d1ea2b1187186081ed50f17c10ba7f
|
[
"BSD-3-Clause"
] | 17
|
2021-11-04T03:15:23.000Z
|
2022-03-24T02:24:15.000Z
|
from .full_space import full_space_dense_layer, full_space_conv_layer
from .reduced_space import reduced_space_dense_layer
| 41
| 69
| 0.902439
| 20
| 123
| 5
| 0.4
| 0.27
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073171
| 123
| 2
| 70
| 61.5
| 0.877193
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
fa71eeac3d674093c4c774373dd96dc7070f8a46
| 22,469
|
py
|
Python
|
python/powermeter_api/api/recent_api.py
|
thracesystems/powermeter-api
|
7bdab034ff916ee49e986de88f157bd044e981c1
|
[
"Apache-2.0"
] | null | null | null |
python/powermeter_api/api/recent_api.py
|
thracesystems/powermeter-api
|
7bdab034ff916ee49e986de88f157bd044e981c1
|
[
"Apache-2.0"
] | null | null | null |
python/powermeter_api/api/recent_api.py
|
thracesystems/powermeter-api
|
7bdab034ff916ee49e986de88f157bd044e981c1
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
PowerMeter API
API # noqa: E501
The version of the OpenAPI document: 2021.4.1
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from powermeter_api.api_client import ApiClient
from powermeter_api.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class RecentApi(object):
    """NOTE: This class is auto generated by OpenAPI Generator
    Ref: https://openapi-generator.tech
    Do not edit the class manually.

    Read-only access to the authenticated user's recently viewed
    dashboards, designs, projects, scenarios and simulations. All five
    endpoints share the same GET shape, so the common machinery lives in
    :meth:`_list_recent`.
    """

    # Framework keyword arguments accepted by every endpoint method.
    _COMMON_PARAMS = frozenset([
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ])

    def __init__(self, api_client=None):
        # Fall back to a default-configured client when none is injected.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def _list_recent(self, method_name, resource_path, response_type, kwargs):
        """Shared implementation for every /recent/* list endpoint.

        :param method_name: public method name, used only in error messages
        :param resource_path: endpoint path, e.g. '/recent/dashboard/'
        :param response_type: OpenAPI response type string,
            e.g. 'list[DashboardAccess]'
        :param kwargs: caller-supplied keyword arguments; only keys in
            ``_COMMON_PARAMS`` are accepted
        :raises ApiTypeError: if an unexpected keyword argument is passed
        :return: whatever ``ApiClient.call_api`` returns (deserialized data,
            a (data, status, headers) tuple, or the request thread if async)
        """
        for key in kwargs:
            if key not in self._COMMON_PARAMS:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method %s" % (key, method_name)
                )
        # HTTP header `Accept`
        header_params = {
            'Accept': self.api_client.select_header_accept(
                ['application/json']),  # noqa: E501
        }
        return self.api_client.call_api(
            resource_path, 'GET',
            {},   # path_params
            [],   # query_params
            header_params,
            body=None,
            post_params=[],
            files={},
            response_type=response_type,
            auth_settings=['Basic'],  # Authentication setting
            async_req=kwargs.get('async_req'),
            _return_http_data_only=kwargs.get('_return_http_data_only'),  # noqa: E501
            _preload_content=kwargs.get('_preload_content', True),
            _request_timeout=kwargs.get('_request_timeout'),
            collection_formats={})

    def recent_dashboard_list(self, **kwargs):  # noqa: E501
        """Get list of recent design dashboards.  # noqa: E501

        Synchronous by default; pass ``async_req=True`` to get a request
        thread back and call ``.get()`` on it for the result.

        :param async_req bool: execute request asynchronously
        :param _preload_content: if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        :param _request_timeout: total timeout (number) or a
            (connection, read) tuple.
        :return: list[DashboardAccess], or the request thread if async
        """
        kwargs['_return_http_data_only'] = True
        return self.recent_dashboard_list_with_http_info(**kwargs)  # noqa: E501

    def recent_dashboard_list_with_http_info(self, **kwargs):  # noqa: E501
        """Get list of recent design dashboards, with HTTP metadata.  # noqa: E501

        Accepts the same keyword arguments as
        :meth:`recent_dashboard_list` plus ``_return_http_data_only``.

        :return: tuple(list[DashboardAccess], status_code(int),
            headers(HTTPHeaderDict)), or the request thread if async
        """
        return self._list_recent(
            'recent_dashboard_list', '/recent/dashboard/',
            'list[DashboardAccess]', kwargs)

    def recent_design_list(self, **kwargs):  # noqa: E501
        """Get list of recent designs.  # noqa: E501

        Synchronous by default; pass ``async_req=True`` to get a request
        thread back and call ``.get()`` on it for the result.

        :param async_req bool: execute request asynchronously
        :param _preload_content: if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        :param _request_timeout: total timeout (number) or a
            (connection, read) tuple.
        :return: list[DesignAccess], or the request thread if async
        """
        kwargs['_return_http_data_only'] = True
        return self.recent_design_list_with_http_info(**kwargs)  # noqa: E501

    def recent_design_list_with_http_info(self, **kwargs):  # noqa: E501
        """Get list of recent designs, with HTTP metadata.  # noqa: E501

        Accepts the same keyword arguments as :meth:`recent_design_list`
        plus ``_return_http_data_only``.

        :return: tuple(list[DesignAccess], status_code(int),
            headers(HTTPHeaderDict)), or the request thread if async
        """
        return self._list_recent(
            'recent_design_list', '/recent/design/',
            'list[DesignAccess]', kwargs)

    def recent_project_list(self, **kwargs):  # noqa: E501
        """Get list of recent projects.  # noqa: E501

        Synchronous by default; pass ``async_req=True`` to get a request
        thread back and call ``.get()`` on it for the result.

        :param async_req bool: execute request asynchronously
        :param _preload_content: if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        :param _request_timeout: total timeout (number) or a
            (connection, read) tuple.
        :return: list[ProjectAccess], or the request thread if async
        """
        kwargs['_return_http_data_only'] = True
        return self.recent_project_list_with_http_info(**kwargs)  # noqa: E501

    def recent_project_list_with_http_info(self, **kwargs):  # noqa: E501
        """Get list of recent projects, with HTTP metadata.  # noqa: E501

        Accepts the same keyword arguments as :meth:`recent_project_list`
        plus ``_return_http_data_only``.

        :return: tuple(list[ProjectAccess], status_code(int),
            headers(HTTPHeaderDict)), or the request thread if async
        """
        return self._list_recent(
            'recent_project_list', '/recent/project/',
            'list[ProjectAccess]', kwargs)

    def recent_scenario_list(self, **kwargs):  # noqa: E501
        """Get list of recent project scenarios.  # noqa: E501

        Synchronous by default; pass ``async_req=True`` to get a request
        thread back and call ``.get()`` on it for the result.

        :param async_req bool: execute request asynchronously
        :param _preload_content: if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        :param _request_timeout: total timeout (number) or a
            (connection, read) tuple.
        :return: list[ScenarioAccess], or the request thread if async
        """
        kwargs['_return_http_data_only'] = True
        return self.recent_scenario_list_with_http_info(**kwargs)  # noqa: E501

    def recent_scenario_list_with_http_info(self, **kwargs):  # noqa: E501
        """Get list of recent project scenarios, with HTTP metadata.  # noqa: E501

        Accepts the same keyword arguments as :meth:`recent_scenario_list`
        plus ``_return_http_data_only``.

        :return: tuple(list[ScenarioAccess], status_code(int),
            headers(HTTPHeaderDict)), or the request thread if async
        """
        return self._list_recent(
            'recent_scenario_list', '/recent/scenario/',
            'list[ScenarioAccess]', kwargs)

    def recent_simulation_list(self, **kwargs):  # noqa: E501
        """Get list of recent sims.  # noqa: E501

        Synchronous by default; pass ``async_req=True`` to get a request
        thread back and call ``.get()`` on it for the result.

        :param async_req bool: execute request asynchronously
        :param _preload_content: if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        :param _request_timeout: total timeout (number) or a
            (connection, read) tuple.
        :return: list[SimAccess], or the request thread if async
        """
        kwargs['_return_http_data_only'] = True
        return self.recent_simulation_list_with_http_info(**kwargs)  # noqa: E501

    def recent_simulation_list_with_http_info(self, **kwargs):  # noqa: E501
        """Get list of recent sims, with HTTP metadata.  # noqa: E501

        Accepts the same keyword arguments as
        :meth:`recent_simulation_list` plus ``_return_http_data_only``.

        :return: tuple(list[SimAccess], status_code(int),
            headers(HTTPHeaderDict)), or the request thread if async
        """
        return self._list_recent(
            'recent_simulation_list', '/recent/simulation/',
            'list[SimAccess]', kwargs)
| 39.909414
| 96
| 0.580044
| 2,398
| 22,469
| 5.173478
| 0.071726
| 0.036112
| 0.045139
| 0.036273
| 0.933661
| 0.933661
| 0.925601
| 0.925601
| 0.925601
| 0.903434
| 0
| 0.013205
| 0.349504
| 22,469
| 562
| 97
| 39.980427
| 0.835591
| 0.480573
| 0
| 0.731225
| 1
| 0
| 0.146612
| 0.039083
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043478
| false
| 0
| 0.019763
| 0
| 0.106719
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d744d61c248f582a0e3f83b3d3b43d79fb1c17ac
| 223
|
py
|
Python
|
nmigen/vendor/xilinx.py
|
psumesh/nmigen
|
7d611b8fc1d9e58853ff268ec38ff8f4131a9774
|
[
"BSD-2-Clause"
] | 528
|
2020-01-28T18:21:00.000Z
|
2021-12-09T06:27:51.000Z
|
nmigen/vendor/xilinx.py
|
psumesh/nmigen
|
7d611b8fc1d9e58853ff268ec38ff8f4131a9774
|
[
"BSD-2-Clause"
] | 360
|
2020-01-28T18:34:30.000Z
|
2021-12-10T08:03:32.000Z
|
nmigen/vendor/xilinx.py
|
psumesh/nmigen
|
7d611b8fc1d9e58853ff268ec38ff8f4131a9774
|
[
"BSD-2-Clause"
] | 100
|
2020-02-06T21:55:46.000Z
|
2021-11-25T19:20:44.000Z
|
# Backwards-compatibility shim: the nmigen project was renamed to Amaranth HDL,
# so this module re-exports amaranth.vendor.xilinx under the old import path
# (including its __all__) and warns importers to migrate.
from amaranth.vendor.xilinx import *
from amaranth.vendor.xilinx import __all__
import warnings
# stacklevel=2 attributes the warning to the module importing this shim,
# not to this file itself.
warnings.warn("instead of nmigen.vendor.xilinx, use amaranth.vendor.xilinx",
              DeprecationWarning, stacklevel=2)
| 27.875
| 76
| 0.7713
| 27
| 223
| 6.222222
| 0.555556
| 0.285714
| 0.357143
| 0.285714
| 0.357143
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005263
| 0.147982
| 223
| 7
| 77
| 31.857143
| 0.878947
| 0
| 0
| 0
| 0
| 0
| 0.264574
| 0.192825
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d7584cfd8836d2082bc285338a1155104c8d7013
| 35,879
|
py
|
Python
|
backends/tests/integration/core/models_tests.py
|
sltk/crmint
|
bc8417bc4ed225faa5caa88daca48f1f12f2ac94
|
[
"Apache-2.0"
] | null | null | null |
backends/tests/integration/core/models_tests.py
|
sltk/crmint
|
bc8417bc4ed225faa5caa88daca48f1f12f2ac94
|
[
"Apache-2.0"
] | null | null | null |
backends/tests/integration/core/models_tests.py
|
sltk/crmint
|
bc8417bc4ed225faa5caa88daca48f1f12f2ac94
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 Google Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from google.appengine.api import taskqueue
from google.appengine.ext import testbed
import mock
from core import cache
from core import models
from tests import utils
class TestPipelineWithJobs(utils.ModelTestCase):
  """Integration tests for Pipeline start/stop/finish transitions with Jobs.

  Runs against the App Engine testbed so taskqueue/memcache/app-identity
  calls made by the model layer hit local stubs instead of real services.
  """
  def setUp(self):
    super(TestPipelineWithJobs, self).setUp()
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    # Activate which service we want to stub
    self.testbed.init_taskqueue_stub()
    self.testbed.init_memcache_stub()
    self.testbed.init_app_identity_stub()
  def tearDown(self):
    super(TestPipelineWithJobs, self).tearDown()
    self.testbed.deactivate()
  def test_start_fails_without_jobs(self):
    """start() returns False and the pipeline stays IDLE when it has no jobs."""
    pipeline = models.Pipeline.create()
    self.assertEqual(pipeline.status, models.Pipeline.STATUS.IDLE)
    result = pipeline.start()
    self.assertEqual(result, False)
    self.assertEqual(pipeline.status, models.Pipeline.STATUS.IDLE)
  def test_start_fails_if_already_running(self):
    """start() is refused on a pipeline that is already RUNNING."""
    pipeline = models.Pipeline.create()
    pipeline.status = models.Pipeline.STATUS.RUNNING
    pipeline.save()
    self.assertEqual(pipeline.status, models.Pipeline.STATUS.RUNNING)
    result = pipeline.start()
    self.assertEqual(result, False)
    self.assertEqual(pipeline.status, models.Pipeline.STATUS.RUNNING)
  def test_start_succeeds_with_one_job_idle(self):
    """An IDLE job lets start() succeed and move the pipeline to RUNNING."""
    pipeline = models.Pipeline.create()
    job1 = models.Job.create(pipeline_id=pipeline.id)
    self.assertEqual(pipeline.status, models.Pipeline.STATUS.IDLE)
    result = pipeline.start()
    self.assertEqual(result, True)
    self.assertEqual(pipeline.status, models.Pipeline.STATUS.RUNNING)
  def test_start_fails_with_one_job_running(self):
    """A job already RUNNING blocks the pipeline from starting."""
    pipeline = models.Pipeline.create()
    job1 = models.Job.create(pipeline_id=pipeline.id)
    job1.status = models.Job.STATUS.RUNNING
    job1.save()
    self.assertEqual(pipeline.status, models.Pipeline.STATUS.IDLE)
    result = pipeline.start()
    self.assertEqual(result, False)
    self.assertEqual(pipeline.status, models.Pipeline.STATUS.IDLE)
  def test_start_succeeds_with_one_job_succeeded(self):
    """A previously SUCCEEDED job can be restarted with the pipeline."""
    pipeline = models.Pipeline.create()
    job1 = models.Job.create(pipeline_id=pipeline.id)
    job1.status = models.Job.STATUS.SUCCEEDED
    job1.save()
    self.assertEqual(pipeline.status, models.Pipeline.STATUS.IDLE)
    result = pipeline.start()
    self.assertEqual(result, True)
    self.assertEqual(pipeline.status, models.Pipeline.STATUS.RUNNING)
  def test_start_succeeds_with_one_job_failed(self):
    """A previously FAILED job can be restarted with the pipeline."""
    pipeline = models.Pipeline.create()
    job1 = models.Job.create(pipeline_id=pipeline.id)
    job1.status = models.Job.STATUS.FAILED
    job1.save()
    self.assertEqual(pipeline.status, models.Pipeline.STATUS.IDLE)
    result = pipeline.start()
    self.assertEqual(result, True)
    self.assertEqual(pipeline.status, models.Pipeline.STATUS.RUNNING)
  @mock.patch('core.cloud_logging.logger')
  def test_start_fails_with_one_job_not_getting_ready(self, patched_logger):
    """A job with an unrenderable param value keeps the pipeline IDLE."""
    # The code under test reads the logging callable's __name__; the mock
    # needs a concrete value for that attribute.
    patched_logger.log_struct.__name__ = 'foo'
    pipeline = models.Pipeline.create()
    job1 = models.Job.create(pipeline_id=pipeline.id)
    models.Param.create(
        job_id=job1.id,
        name='field1',
        type='number',
        value='{% ABC %}') # initialize with a non-boolean value
    self.assertEqual(pipeline.status, models.Pipeline.STATUS.IDLE)
    result = pipeline.start()
    self.assertEqual(result, False)
    self.assertEqual(pipeline.status, models.Pipeline.STATUS.IDLE)
  def test_stop_fails_if_not_running(self):
    """stop() returns False when the pipeline is not RUNNING."""
    pipeline = models.Pipeline.create(status=models.Pipeline.STATUS.IDLE)
    self.assertEqual(pipeline.status, models.Pipeline.STATUS.IDLE)
    result = pipeline.stop()
    self.assertEqual(result, False)
  def test_stop_succeeds_and_stop_all_jobs(self):
    """stop() marks every still-RUNNING job FAILED; SUCCEEDED jobs keep status."""
    pipeline = models.Pipeline.create(status=models.Pipeline.STATUS.RUNNING)
    models.Job.create(pipeline_id=pipeline.id, status=models.Job.STATUS.SUCCEEDED)
    models.Job.create(pipeline_id=pipeline.id, status=models.Job.STATUS.RUNNING)
    models.Job.create(pipeline_id=pipeline.id, status=models.Job.STATUS.RUNNING)
    self.assertEqual(len(pipeline.jobs.all()), 3)
    self.assertEqual(pipeline.jobs[0].get_status(), models.Job.STATUS.SUCCEEDED)
    self.assertEqual(pipeline.jobs[1].get_status(), models.Job.STATUS.RUNNING)
    self.assertEqual(pipeline.jobs[2].get_status(), models.Job.STATUS.RUNNING)
    result = pipeline.stop()
    self.assertTrue(result)
    self.assertEqual(pipeline.jobs[0].get_status(), models.Job.STATUS.SUCCEEDED)
    self.assertEqual(pipeline.jobs[1].get_status(), models.Job.STATUS.FAILED)
    self.assertEqual(pipeline.jobs[2].get_status(), models.Job.STATUS.FAILED)
  def test_stop_succeeds_if_all_jobs_succeeded(self):
    """stop() succeeds and leaves already-SUCCEEDED jobs untouched."""
    pipeline = models.Pipeline.create(status=models.Pipeline.STATUS.RUNNING)
    models.Job.create(pipeline_id=pipeline.id, status=models.Job.STATUS.SUCCEEDED)
    models.Job.create(pipeline_id=pipeline.id, status=models.Job.STATUS.SUCCEEDED)
    models.Job.create(pipeline_id=pipeline.id, status=models.Job.STATUS.SUCCEEDED)
    self.assertEqual(len(pipeline.jobs.all()), 3)
    result = pipeline.stop()
    self.assertTrue(result)
    self.assertEqual(pipeline.jobs[0].get_status(), models.Job.STATUS.SUCCEEDED)
    self.assertEqual(pipeline.jobs[1].get_status(), models.Job.STATUS.SUCCEEDED)
    self.assertEqual(pipeline.jobs[2].get_status(), models.Job.STATUS.SUCCEEDED)
  def test_start_single_job_succeeds(self):
    """start_single_job() runs one job and marks the pipeline RUNNING."""
    pipeline = models.Pipeline.create(status=models.Pipeline.STATUS.IDLE)
    job1 = models.Job.create(pipeline_id=pipeline.id)
    result = pipeline.start_single_job(job1)
    self.assertTrue(result)
    self.assertEqual(job1.get_status(), models.Job.STATUS.RUNNING)
    self.assertEqual(pipeline.status, models.Pipeline.STATUS.RUNNING)
  def test_start_single_job_fails_if_running(self):
    """start_single_job() refuses while the pipeline is already RUNNING."""
    pipeline = models.Pipeline.create(status=models.Pipeline.STATUS.RUNNING)
    job1 = models.Job.create(pipeline_id=pipeline.id)
    result = pipeline.start_single_job(job1)
    self.assertFalse(result)
    self.assertEqual(job1.get_status(), models.Job.STATUS.IDLE)
    self.assertEqual(pipeline.status, models.Pipeline.STATUS.RUNNING)
  def test_job_finished_succeeds(self):
    """job_finished() completes the pipeline when every job SUCCEEDED."""
    pipeline = models.Pipeline.create(status=models.Pipeline.STATUS.RUNNING)
    models.Job.create(pipeline_id=pipeline.id, status=models.Job.STATUS.SUCCEEDED)
    models.Job.create(pipeline_id=pipeline.id, status=models.Job.STATUS.SUCCEEDED)
    result = pipeline.job_finished()
    self.assertTrue(result)
    self.assertEqual(pipeline.status, models.Pipeline.STATUS.SUCCEEDED)
  def test_job_finished_fails_if_one_remains(self):
    """job_finished() keeps the pipeline RUNNING while a job still runs."""
    pipeline = models.Pipeline.create(status=models.Pipeline.STATUS.RUNNING)
    models.Job.create(pipeline_id=pipeline.id, status=models.Job.STATUS.SUCCEEDED)
    models.Job.create(pipeline_id=pipeline.id, status=models.Job.STATUS.RUNNING)
    result = pipeline.job_finished()
    self.assertFalse(result)
    self.assertEqual(pipeline.status, models.Pipeline.STATUS.RUNNING)
  def test_job_finished_fails_if_mix_succeeded_and_failed(self):
    """A mix of SUCCEEDED and FAILED jobs finishes the pipeline as FAILED."""
    pipeline = models.Pipeline.create(status=models.Pipeline.STATUS.RUNNING)
    job1 = models.Job.create(pipeline_id=pipeline.id, status=models.Job.STATUS.SUCCEEDED)
    job2 = models.Job.create(pipeline_id=pipeline.id, status=models.Job.STATUS.FAILED)
    models.StartCondition.create(job_id=job2.id, preceding_job_id=None)
    result = pipeline.job_finished()
    self.assertTrue(result)
    self.assertEqual(pipeline.status, models.Pipeline.STATUS.FAILED)
  def test_pipeline_success_with_failed_condition_fulfilled(self):
    """A FAIL start-condition satisfied by a failed job still yields SUCCEEDED."""
    pipeline = models.Pipeline.create(status=models.Pipeline.STATUS.RUNNING)
    job1 = models.Job.create(pipeline_id=pipeline.id, status=models.Job.STATUS.SUCCEEDED)
    job2 = models.Job.create(pipeline_id=pipeline.id, status=models.Job.STATUS.FAILED)
    job3 = models.Job.create(pipeline_id=pipeline.id, status=models.Job.STATUS.SUCCEEDED)
    models.StartCondition.create(
        job_id=job3.id,
        preceding_job_id=job2.id,
        condition=models.StartCondition.CONDITION.FAIL)
    result = pipeline.job_finished()
    self.assertTrue(result)
    self.assertEqual(pipeline.status, models.Pipeline.STATUS.SUCCEEDED)
  def test_successfully_cancel_tasks_on_failure_without_conditions(self):
    """One job failing cancels the sibling's enqueued tasks and fails the pipeline."""
    pipeline = models.Pipeline.create(status=models.Pipeline.STATUS.RUNNING)
    job1 = models.Job.create(pipeline_id=pipeline.id)
    job2 = models.Job.create(pipeline_id=pipeline.id)
    self.assertTrue(pipeline.get_ready())
    self.assertEqual(job1.get_status(), models.Job.STATUS.WAITING)
    self.assertEqual(job2.get_status(), models.Job.STATUS.WAITING)
    task1 = job1.start()
    self.assertIsNotNone(task1)
    self.assertEqual(job1.get_status(), models.Job.STATUS.RUNNING)
    self.assertEqual(job1._enqueued_task_count(), 1)
    task2 = job2.start()
    self.assertIsNotNone(task2)
    self.assertEqual(job2.get_status(), models.Job.STATUS.RUNNING)
    self.assertEqual(job2._enqueued_task_count(), 1)
    job2.task_failed(task2.name)
    self.assertEqual(job2.get_status(), models.Job.STATUS.FAILED)
    # It should trigger the end of the pipeline by itself
    self.assertEqual(job1.get_status(), models.Job.STATUS.FAILED)
    self.assertEqual(job1._enqueued_task_count(), 0)
    self.assertEqual(job2._enqueued_task_count(), 0)
    self.assertEqual(pipeline.status, models.Pipeline.STATUS.FAILED)
class TestPipelineDestroy(utils.ModelTestCase):
  """Covers Pipeline.destroy and its cascading deletes of child records."""
  def setUp(self):
    super(TestPipelineDestroy, self).setUp()
    # Bring up the testbed with only the stubs these tests touch.
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_memcache_stub()
    self.testbed.init_app_identity_stub()
  def tearDown(self):
    super(TestPipelineDestroy, self).tearDown()
    self.testbed.deactivate()
  def test_destroy_succeeds(self):
    """The pipeline itself is gone after destroy()."""
    doomed = models.Pipeline.create()
    doomed.destroy()
    self.assertIsNone(models.Pipeline.find(doomed.id))
  def test_destroy_deletes_all_schedules(self):
    """Schedules attached to the pipeline are removed with it."""
    doomed = models.Pipeline.create()
    schedule = models.Schedule.create(pipeline_id=doomed.id)
    self.assertIsNotNone(models.Schedule.find(schedule.id))
    doomed.destroy()
    self.assertIsNone(models.Schedule.find(schedule.id))
  def test_destroy_deletes_all_jobs(self):
    """Jobs attached to the pipeline are removed with it."""
    doomed = models.Pipeline.create()
    job = models.Job.create(pipeline_id=doomed.id, name='j1')
    self.assertIsNotNone(models.Job.find(job.id))
    doomed.destroy()
    self.assertIsNone(models.Job.find(job.id))
  def test_destroy_deletes_all_params(self):
    """Params attached to the pipeline are removed with it."""
    doomed = models.Pipeline.create()
    param = models.Param.create(
        pipeline_id=doomed.id,
        name='p1',
        type='string')
    self.assertIsNotNone(models.Param.find(param.id))
    doomed.destroy()
    self.assertIsNone(models.Param.find(param.id))
class TestPipelineImport(utils.ModelTestCase):
  """Covers Pipeline.import_data."""
  def setUp(self):
    super(TestPipelineImport, self).setUp()
    # Bring up the testbed with the stubs the model layer needs here.
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_taskqueue_stub()
    self.testbed.init_memcache_stub()
    self.testbed.init_app_identity_stub()
  def tearDown(self):
    super(TestPipelineImport, self).tearDown()
    self.testbed.deactivate()
  def test_import_data_succeeds(self):
    """import_data populates params and renames the referenced jobs."""
    target = models.Pipeline.create()
    first_job = models.Job.create()
    second_job = models.Job.create()
    params = [
        {'name': 'p1', 'type': 'string', 'value': 'foo'},
        {'name': 'p2', 'type': 'string', 'value': 'bar'},
    ]
    schedules = [
        {'id': None, 'cron': 'NEW1'},
        {'id': None, 'cron': 'NEW2'},
    ]
    jobs = [
        {'id': first_job.id, 'name': 'j1', 'hash_start_conditions': []},
        {'id': second_job.id, 'name': 'j2', 'hash_start_conditions': []},
    ]
    target.import_data({
        'params': params,
        'schedules': schedules,
        'jobs': jobs,
    })
    self.assertEqual(len(target.params.all()), 2)
    self.assertEqual(target.params[0].name, 'p1')
    self.assertEqual(target.params[0].value, 'foo')
    self.assertEqual(target.params[1].name, 'p2')
    self.assertEqual(target.params[1].value, 'bar')
    self.assertEqual(len(target.jobs.all()), 2)
    self.assertEqual(target.jobs[0].name, 'j1')
    self.assertEqual(target.jobs[1].name, 'j2')
class TestJobStartedStatus(utils.ModelTestCase):
  """Verifies the Job status transition when a ready job is started."""
  def setUp(self):
    super(TestJobStartedStatus, self).setUp()
    # Bring up the testbed with the stubs the model layer needs here.
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_taskqueue_stub()
    self.testbed.init_memcache_stub()
    self.testbed.init_app_identity_stub()
  def tearDown(self):
    super(TestJobStartedStatus, self).tearDown()
    self.testbed.deactivate()
  def test_succeeds_status_running(self):
    """A WAITING job moves to RUNNING once start() succeeds."""
    owner = models.Pipeline.create()
    job = models.Job.create(pipeline_id=owner.id)
    self.assertTrue(owner.get_ready())
    self.assertEqual(job.status, models.Job.STATUS.WAITING)
    self.assertTrue(job.start())
    self.assertEqual(job.status, models.Job.STATUS.RUNNING)
class TestJobDestroy(utils.ModelTestCase):
  """Covers Job.destroy and the cleanup of dependent records."""
  def setUp(self):
    super(TestJobDestroy, self).setUp()
    # Bring up the testbed with only the stubs these tests touch.
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_memcache_stub()
    self.testbed.init_app_identity_stub()
  def tearDown(self):
    super(TestJobDestroy, self).tearDown()
    self.testbed.deactivate()
  def _linked_jobs(self):
    """Create two jobs joined by a start condition; return (up, down, cond)."""
    upstream = models.Job.create()
    downstream = models.Job.create()
    condition = models.StartCondition.create(
        job_id=downstream.id,
        preceding_job_id=upstream.id)
    return upstream, downstream, condition
  def test_destroy_succeeds(self):
    """The job itself is gone after destroy()."""
    job = models.Job.create()
    job.destroy()
    self.assertIsNone(models.Job.find(job.id))
  def test_destroy_deletes_all_starting_conditions(self):
    """Destroying the downstream job removes its own start condition."""
    _, downstream, condition = self._linked_jobs()
    self.assertIsNotNone(models.StartCondition.find(condition.id))
    downstream.destroy()
    self.assertIsNone(models.StartCondition.find(condition.id))
  def test_destroy_deletes_preceding_starting_conditions(self):
    """Destroying the upstream job removes conditions that reference it."""
    upstream, _, condition = self._linked_jobs()
    self.assertIsNotNone(models.StartCondition.find(condition.id))
    upstream.destroy()
    self.assertIsNone(models.StartCondition.find(condition.id))
  def test_destroy_deletes_all_params(self):
    """Params attached to the job are removed with it."""
    job = models.Job.create()
    param = models.Param.create(
        job_id=job.id,
        name='p1',
        type='string')
    self.assertIsNotNone(models.Param.find(param.id))
    job.destroy()
    self.assertIsNone(models.Param.find(param.id))
class TestStartConditionWithJobs(utils.ModelTestCase):
  """Covers derived attributes of StartCondition."""
  def setUp(self):
    super(TestStartConditionWithJobs, self).setUp()
    # Bring up the testbed with only the stubs these tests touch.
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_memcache_stub()
    self.testbed.init_app_identity_stub()
  def tearDown(self):
    super(TestStartConditionWithJobs, self).tearDown()
    self.testbed.deactivate()
  def _success_condition(self):
    """Link 'job1' -> 'job2' with a SUCCESS condition; return (job1, cond)."""
    owner = models.Pipeline.create()
    predecessor = models.Job.create(pipeline_id=owner.id, name='job1')
    successor = models.Job.create(pipeline_id=owner.id, name='job2')
    condition = models.StartCondition.create(
        job_id=successor.id,
        preceding_job_id=predecessor.id,
        condition=models.StartCondition.CONDITION.SUCCESS)
    return predecessor, condition
  def test_value_succeeds(self):
    """value renders as '<preceding_job_id>,success'."""
    predecessor, condition = self._success_condition()
    self.assertEqual(condition.value, '%s,success' % predecessor.id)
  def test_preceding_job_name_succeeds(self):
    """preceding_job_name resolves to the upstream job's name."""
    _, condition = self._success_condition()
    self.assertEqual(condition.preceding_job_name, 'job1')
class TestJobStartConditions(utils.ModelTestCase):
  """Tests for assigning start conditions and how they gate job.start().

  Consistency fix: two tests passed the raw strings 'success' and 'fail'
  where every sibling test uses the models.StartCondition.CONDITION enum
  constants; the constants are used throughout now (the `value` test in
  TestStartConditionWithJobs shows CONDITION.SUCCESS renders as 'success',
  so behavior is unchanged).
  """

  def setUp(self):
    super(TestJobStartConditions, self).setUp()
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    # Activate which service we want to stub
    self.testbed.init_memcache_stub()
    self.testbed.init_app_identity_stub()
    self.testbed.init_taskqueue_stub()

  def tearDown(self):
    super(TestJobStartConditions, self).tearDown()
    self.testbed.deactivate()

  def test_create_start_conditions_succeeds(self):
    """assign_start_conditions creates one record per given mapping."""
    pipeline = models.Pipeline.create()
    job1 = models.Job.create(pipeline_id=pipeline.id, status=models.Job.STATUS.IDLE)
    job2 = models.Job.create(pipeline_id=pipeline.id, status=models.Job.STATUS.IDLE)
    job3 = models.Job.create(pipeline_id=pipeline.id, status=models.Job.STATUS.IDLE)
    arg_start_conditions = [
        {'preceding_job_id': job1.id, 'condition': models.StartCondition.CONDITION.SUCCESS},
        {'preceding_job_id': job2.id, 'condition': models.StartCondition.CONDITION.SUCCESS},
    ]
    job3.assign_start_conditions(arg_start_conditions)
    self.assertEqual(len(job3.start_conditions), 2)

  def test_update_start_conditions_succeeds(self):
    """assign_start_conditions replaces any pre-existing conditions."""
    pipeline = models.Pipeline.create()
    job1 = models.Job.create(pipeline_id=pipeline.id)
    job2 = models.Job.create(pipeline_id=pipeline.id)
    job3 = models.Job.create(pipeline_id=pipeline.id)
    models.StartCondition.create(
        job_id=job3.id,
        preceding_job_id=job2.id,
        condition=models.StartCondition.CONDITION.FAIL)
    arg_start_conditions = [
        {
            'preceding_job_id': job1.id,
            'condition': models.StartCondition.CONDITION.SUCCESS},
        {
            'preceding_job_id': job2.id,
            'condition': models.StartCondition.CONDITION.SUCCESS},
    ]
    self.assertEqual(len(job3.start_conditions), 1)
    self.assertEqual(job3.start_conditions[0].condition,
                     models.StartCondition.CONDITION.FAIL)
    job3.assign_start_conditions(arg_start_conditions)
    self.assertEqual(len(job3.start_conditions), 2)
    self.assertEqual(job3.start_conditions[0].condition,
                     models.StartCondition.CONDITION.SUCCESS)
    self.assertEqual(job3.start_conditions[1].condition,
                     models.StartCondition.CONDITION.SUCCESS)

  def test_fails_if_running(self):
    """start() returns None when the job is already RUNNING."""
    pipeline = models.Pipeline.create()
    job = models.Job.create(pipeline_id=pipeline.id)
    self.assertTrue(pipeline.get_ready())
    self.assertEqual(job.get_status(), models.Job.STATUS.WAITING)
    task1 = job.start()
    self.assertIsNotNone(task1)
    self.assertEqual(job.get_status(), models.Job.STATUS.RUNNING)
    task2 = job.start()
    self.assertIsNone(task2)

  def test_succeeds_if_waiting_without_start_conditions(self):
    """A WAITING job with no conditions starts immediately."""
    pipeline = models.Pipeline.create()
    job = models.Job.create(pipeline_id=pipeline.id)
    self.assertTrue(pipeline.get_ready())
    self.assertEqual(job.get_status(), models.Job.STATUS.WAITING)
    task = job.start()
    self.assertEqual(job.get_status(), models.Job.STATUS.RUNNING)
    self.assertIsNotNone(task)

  def test_succeeds_with_start_condition_fulfill_success_with_succeeded(self):
    """SUCCESS condition: dependent job runs once the upstream succeeds."""
    pipeline = models.Pipeline.create()
    job1 = models.Job.create(pipeline_id=pipeline.id)
    job2 = models.Job.create(pipeline_id=pipeline.id)
    models.StartCondition.create(
        job_id=job2.id,
        preceding_job_id=job1.id,
        condition=models.StartCondition.CONDITION.SUCCESS)
    self.assertTrue(pipeline.get_ready())
    self.assertEqual(job1.get_status(), models.Job.STATUS.WAITING)
    self.assertEqual(job2.get_status(), models.Job.STATUS.WAITING)
    task1 = job1.start()
    self.assertEqual(job1.get_status(), models.Job.STATUS.RUNNING)
    self.assertEqual(job2.get_status(), models.Job.STATUS.WAITING)
    job1.task_succeeded(task1.name)
    self.assertEqual(job1.get_status(), models.Job.STATUS.SUCCEEDED)
    self.assertEqual(job2.get_status(), models.Job.STATUS.RUNNING)

  def test_fails_with_start_condition_unfulfill_success_with_failed(self):
    """SUCCESS condition: dependent job fails when the upstream fails."""
    pipeline = models.Pipeline.create()
    job1 = models.Job.create(pipeline_id=pipeline.id)
    job2 = models.Job.create(pipeline_id=pipeline.id)
    models.StartCondition.create(
        job_id=job2.id,
        preceding_job_id=job1.id,
        condition=models.StartCondition.CONDITION.SUCCESS)
    self.assertTrue(pipeline.get_ready())
    self.assertEqual(job1.get_status(), models.Job.STATUS.WAITING)
    self.assertEqual(job2.get_status(), models.Job.STATUS.WAITING)
    task1 = job1.start()
    self.assertEqual(job1.get_status(), models.Job.STATUS.RUNNING)
    self.assertEqual(job2.get_status(), models.Job.STATUS.WAITING)
    job1.task_failed(task1.name)
    self.assertEqual(job1.get_status(), models.Job.STATUS.FAILED)
    self.assertEqual(job2.get_status(), models.Job.STATUS.FAILED)

  def test_succeeds_with_start_condition_fulfill_fail_with_failed(self):
    """FAIL condition: dependent job runs when the upstream fails."""
    pipeline = models.Pipeline.create()
    job1 = models.Job.create(pipeline_id=pipeline.id)
    job2 = models.Job.create(pipeline_id=pipeline.id)
    models.StartCondition.create(
        job_id=job2.id,
        preceding_job_id=job1.id,
        condition=models.StartCondition.CONDITION.FAIL)
    self.assertTrue(pipeline.get_ready())
    self.assertEqual(job1.get_status(), models.Job.STATUS.WAITING)
    self.assertEqual(job2.get_status(), models.Job.STATUS.WAITING)
    task1 = job1.start()
    self.assertEqual(job1.get_status(), models.Job.STATUS.RUNNING)
    self.assertEqual(job2.get_status(), models.Job.STATUS.WAITING)
    job1.task_failed(task1.name)
    self.assertEqual(job1.get_status(), models.Job.STATUS.FAILED)
    self.assertEqual(job2.get_status(), models.Job.STATUS.RUNNING)
    # An expected upstream failure must not fail the whole pipeline.
    self.assertNotEqual(pipeline.status, models.Pipeline.STATUS.FAILED)

  def test_fails_with_start_condition_unfulfill_fail_with_succeeded(self):
    """FAIL condition: dependent job fails when the upstream succeeds."""
    pipeline = models.Pipeline.create()
    job1 = models.Job.create(pipeline_id=pipeline.id)
    job2 = models.Job.create(pipeline_id=pipeline.id)
    models.StartCondition.create(
        job_id=job2.id,
        preceding_job_id=job1.id,
        condition=models.StartCondition.CONDITION.FAIL)
    self.assertTrue(pipeline.get_ready())
    self.assertEqual(job1.get_status(), models.Job.STATUS.WAITING)
    self.assertEqual(job2.get_status(), models.Job.STATUS.WAITING)
    task1 = job1.start()
    self.assertEqual(job1.get_status(), models.Job.STATUS.RUNNING)
    self.assertEqual(job2.get_status(), models.Job.STATUS.WAITING)
    job1.task_succeeded(task1.name)
    self.assertEqual(job1.get_status(), models.Job.STATUS.SUCCEEDED)
    self.assertEqual(job2.get_status(), models.Job.STATUS.FAILED)

  def test_succeeds_with_start_condition_fulfill_whatever_with_failed(self):
    """WHATEVER condition: dependent job runs on upstream failure."""
    pipeline = models.Pipeline.create()
    job1 = models.Job.create(pipeline_id=pipeline.id)
    job2 = models.Job.create(pipeline_id=pipeline.id)
    models.StartCondition.create(
        job_id=job2.id,
        preceding_job_id=job1.id,
        condition=models.StartCondition.CONDITION.WHATEVER)
    self.assertTrue(pipeline.get_ready())
    self.assertEqual(job1.get_status(), models.Job.STATUS.WAITING)
    self.assertEqual(job2.get_status(), models.Job.STATUS.WAITING)
    task1 = job1.start()
    self.assertEqual(job1.get_status(), models.Job.STATUS.RUNNING)
    self.assertEqual(job2.get_status(), models.Job.STATUS.WAITING)
    job1.task_failed(task1.name)
    self.assertEqual(job1.get_status(), models.Job.STATUS.FAILED)
    self.assertEqual(job2.get_status(), models.Job.STATUS.RUNNING)

  def test_succeeds_with_start_condition_fulfill_whatever_with_succeeded(self):
    """WHATEVER condition: dependent job runs on upstream success."""
    pipeline = models.Pipeline.create()
    job1 = models.Job.create(pipeline_id=pipeline.id)
    job2 = models.Job.create(pipeline_id=pipeline.id)
    models.StartCondition.create(
        job_id=job2.id,
        preceding_job_id=job1.id,
        condition=models.StartCondition.CONDITION.WHATEVER)
    self.assertTrue(pipeline.get_ready())
    self.assertEqual(job1.get_status(), models.Job.STATUS.WAITING)
    self.assertEqual(job2.get_status(), models.Job.STATUS.WAITING)
    task1 = job1.start()
    self.assertEqual(job1.get_status(), models.Job.STATUS.RUNNING)
    self.assertEqual(job2.get_status(), models.Job.STATUS.WAITING)
    job1.task_succeeded(task1.name)
    self.assertEqual(job1.get_status(), models.Job.STATUS.SUCCEEDED)
    self.assertEqual(job2.get_status(), models.Job.STATUS.RUNNING)

  def test_fails_with_start_condition_unfulfill_whatever_with_running(self):
    """WHATEVER condition: dependent job won't start while upstream runs."""
    pipeline = models.Pipeline.create()
    job1 = models.Job.create(pipeline_id=pipeline.id)
    job2 = models.Job.create(pipeline_id=pipeline.id)
    models.StartCondition.create(
        job_id=job2.id,
        preceding_job_id=job1.id,
        condition=models.StartCondition.CONDITION.WHATEVER)
    self.assertTrue(pipeline.get_ready())
    task1 = job1.start()
    self.assertEqual(job1.get_status(), models.Job.STATUS.RUNNING)
    self.assertEqual(job2.get_status(), models.Job.STATUS.WAITING)
    task2 = job2.start()
    self.assertIsNone(task2)
    self.assertEqual(job1.get_status(), models.Job.STATUS.RUNNING)
    self.assertEqual(job2.get_status(), models.Job.STATUS.WAITING)
class TestJobStopConditions(utils.ModelTestCase):
  """Exercises stopping jobs from their various lifecycle states."""

  def setUp(self):
    super(TestJobStopConditions, self).setUp()
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    # Stub out only the services these tests touch.
    self.testbed.init_memcache_stub()
    self.testbed.init_app_identity_stub()
    self.testbed.init_taskqueue_stub()

  def tearDown(self):
    super(TestJobStopConditions, self).tearDown()
    self.testbed.deactivate()

  def test_stop_fails_with_idle(self):
    """stop() is a no-op on an IDLE job and reports failure."""
    pipeline = models.Pipeline.create()
    idle_job = models.Job.create(pipeline_id=pipeline.id)
    self.assertEqual(idle_job.get_status(), models.Job.STATUS.IDLE)
    stopped = idle_job.stop()
    self.assertFalse(stopped)
    self.assertEqual(idle_job.get_status(), models.Job.STATUS.IDLE)

  def test_stop_reset_to_idle(self):
    """Stopping a WAITING job resets it straight back to IDLE."""
    pipeline = models.Pipeline.create()
    waiting_job = models.Job.create(pipeline_id=pipeline.id)
    self.assertTrue(pipeline.get_ready())
    self.assertEqual(waiting_job.get_status(), models.Job.STATUS.WAITING)
    stopped = waiting_job.stop()
    self.assertTrue(stopped)
    self.assertEqual(waiting_job.status, models.Job.STATUS.IDLE)
    self.assertEqual(waiting_job.get_status(), models.Job.STATUS.IDLE)

  def test_stop_succeeds_with_running(self):
    """Stopping a RUNNING job moves it to STOPPING."""
    pipeline = models.Pipeline.create()
    running_job = models.Job.create(pipeline_id=pipeline.id)
    self.assertTrue(pipeline.get_ready())
    started_task = running_job.start()
    self.assertIsNotNone(started_task)
    self.assertTrue(running_job.stop())
    self.assertEqual(running_job.get_status(), models.Job.STATUS.STOPPING)

  def test_stop_succeeds_with_outdated_tasks(self):
    """stop() still succeeds when the queued task already disappeared."""
    pipeline = models.Pipeline.create()
    running_job = models.Job.create(pipeline_id=pipeline.id)
    self.assertTrue(pipeline.get_ready())
    started_task = running_job.start()
    self.assertIsNotNone(started_task)
    # Simulate a task deleted from the queue behind the job's back.
    taskqueue.Queue().delete_tasks([taskqueue.Task(name=started_task.name)])
    self.assertTrue(running_job.stop())
    self.assertEqual(running_job.get_status(), models.Job.STATUS.STOPPING)
class TestJobStartWithDependentJobs(utils.ModelTestCase):
  """Tests how failures propagate through chains of dependent jobs."""

  def setUp(self):
    super(TestJobStartWithDependentJobs, self).setUp()
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    # Stub out only the services these tests touch.
    self.testbed.init_memcache_stub()
    self.testbed.init_app_identity_stub()
    self.testbed.init_taskqueue_stub()

  def tearDown(self):
    super(TestJobStartWithDependentJobs, self).tearDown()
    self.testbed.deactivate()

  def test_start_fails_with_dependent_jobs_and_expecting_success(self):
    """An upstream failure cascades FAILED through SUCCESS conditions."""
    pipeline = models.Pipeline.create()
    first_job = models.Job.create(pipeline_id=pipeline.id)
    second_job = models.Job.create(pipeline_id=pipeline.id)
    third_job = models.Job.create(pipeline_id=pipeline.id)
    models.StartCondition.create(
        job_id=second_job.id,
        preceding_job_id=first_job.id,
        condition=models.StartCondition.CONDITION.SUCCESS)
    models.StartCondition.create(
        job_id=third_job.id,
        preceding_job_id=second_job.id,
        condition=models.StartCondition.CONDITION.SUCCESS)
    self.assertTrue(pipeline.get_ready())
    self.assertEqual(first_job.get_status(), models.Job.STATUS.WAITING)
    self.assertEqual(second_job.get_status(), models.Job.STATUS.WAITING)
    self.assertEqual(third_job.get_status(), models.Job.STATUS.WAITING)
    first_task = first_job.start()
    self.assertIsNotNone(first_task)
    first_job.task_failed(first_task.name)
    self.assertEqual(first_job.get_status(), models.Job.STATUS.FAILED)
    self.assertEqual(second_job.get_status(), models.Job.STATUS.FAILED)
    self.assertEqual(third_job.get_status(), models.Job.STATUS.FAILED)

  def test_start_fails_with_dependent_jobs_and_expecting_fail(self):
    """An unfulfilled FAIL condition fails the downstream chain."""
    pipeline = models.Pipeline.create()
    first_job = models.Job.create(pipeline_id=pipeline.id)
    second_job = models.Job.create(pipeline_id=pipeline.id)
    third_job = models.Job.create(pipeline_id=pipeline.id)
    models.StartCondition.create(
        job_id=second_job.id,
        preceding_job_id=first_job.id,
        condition=models.StartCondition.CONDITION.FAIL)
    models.StartCondition.create(
        job_id=third_job.id,
        preceding_job_id=second_job.id,
        condition=models.StartCondition.CONDITION.SUCCESS)
    self.assertTrue(pipeline.get_ready())
    self.assertEqual(first_job.get_status(), models.Job.STATUS.WAITING)
    self.assertEqual(second_job.get_status(), models.Job.STATUS.WAITING)
    self.assertEqual(third_job.get_status(), models.Job.STATUS.WAITING)
    first_task = first_job.start()
    self.assertIsNotNone(first_task)
    first_job.task_succeeded(first_task.name)
    second_task = second_job.start()
    self.assertIsNone(second_task)
    self.assertEqual(second_job.get_status(), models.Job.STATUS.FAILED)
    self.assertEqual(third_job.get_status(), models.Job.STATUS.FAILED)

  def test_dependent_job_starts_after_multiple_workers_finish_with_fail(self):
    """The job only counts as finished when all of its tasks have finished."""
    pipeline = models.Pipeline.create()
    first_job = models.Job.create(pipeline_id=pipeline.id)
    second_job = models.Job.create(pipeline_id=pipeline.id)
    third_job = models.Job.create(pipeline_id=pipeline.id)
    models.StartCondition.create(
        job_id=second_job.id,
        preceding_job_id=first_job.id,
        condition=models.StartCondition.CONDITION.FAIL)
    models.StartCondition.create(
        job_id=third_job.id,
        preceding_job_id=second_job.id,
        condition=models.StartCondition.CONDITION.SUCCESS)
    self.assertTrue(pipeline.get_ready())
    self.assertEqual(first_job.get_status(), models.Job.STATUS.WAITING)
    self.assertEqual(second_job.get_status(), models.Job.STATUS.WAITING)
    self.assertEqual(third_job.get_status(), models.Job.STATUS.WAITING)
    first_task = first_job.start()
    extra_task = first_job.enqueue(first_job.worker_class, {})
    self.assertIsNotNone(first_task)
    first_job.task_succeeded(first_task.name)
    first_job.task_failed(extra_task.name)
    self.assertEqual(first_job.get_status(), models.Job.STATUS.FAILED)
    manual_start = second_job.start()
    self.assertIsNone(manual_start)
    self.assertEqual(second_job.get_status(), models.Job.STATUS.RUNNING)
    self.assertEqual(third_job.get_status(), models.Job.STATUS.WAITING)
class TestJobStartingMultipleTasks(utils.ModelTestCase):
  """Tests job completion tracking when a job enqueues multiple tasks.

  Bug fix: two assertions used `self.assertTrue(actual, expected)`, where
  unittest treats the second argument as the failure *message*, so a truthy
  status always passed regardless of its value. They are `assertEqual` now.
  """

  def setUp(self):
    super(TestJobStartingMultipleTasks, self).setUp()
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    # Activate which service we want to stub
    self.testbed.init_memcache_stub()
    self.testbed.init_app_identity_stub()
    self.testbed.init_taskqueue_stub()

  def tearDown(self):
    super(TestJobStartingMultipleTasks, self).tearDown()
    self.testbed.deactivate()

  def test_succeeds_completing_tasks_in_series(self):
    """The job stays RUNNING until its last outstanding task succeeds."""
    pipeline = models.Pipeline.create()
    job = models.Job.create(pipeline_id=pipeline.id)
    worker_params = dict([(p.name, p.val) for p in job.params])
    self.assertTrue(pipeline.get_ready())
    self.assertEqual(job.get_status(), models.Job.STATUS.WAITING)
    task1 = job.start()
    self.assertIsNotNone(task1)
    self.assertEqual(job.get_status(), models.Job.STATUS.RUNNING)
    task2 = job.enqueue(job.worker_class, worker_params)
    self.assertIsNotNone(task2)
    job.task_succeeded(task1.name)
    self.assertEqual(job.get_status(), models.Job.STATUS.RUNNING)
    job.task_succeeded(task2.name)
    self.assertEqual(job.get_status(), models.Job.STATUS.SUCCEEDED)

  def test_pipeline_fails_second_task_succeeded_fail_start_condition_fail(self):
    """A failed upstream task with a SUCCESS condition fails the pipeline."""
    pipeline = models.Pipeline.create(status=models.Pipeline.STATUS.RUNNING)
    job1 = models.Job.create(pipeline_id=pipeline.id)
    job2 = models.Job.create(pipeline_id=pipeline.id)
    job3 = models.Job.create(pipeline_id=pipeline.id)
    models.StartCondition.create(
        job_id=job2.id,
        preceding_job_id=job1.id,
        condition=models.StartCondition.CONDITION.SUCCESS)
    pipeline.get_ready()
    task1 = job1.start()
    job1.task_failed(task1.name)
    # Fixed: these were assertTrue(actual, expected) — always-passing.
    self.assertEqual(job1.get_status(), models.Job.STATUS.FAILED)
    # NOTE(review): STOPPING is the original author's intended value (the
    # old assertTrue's second argument) — confirm against models.Job.
    self.assertEqual(job2.get_status(), models.Job.STATUS.STOPPING)
    self.assertEqual(pipeline.status, models.Pipeline.STATUS.FAILED)

  def test_succeeds_completing_tasks_in_parallel(self):
    """Task completions may arrive in any order; last one finishes the job."""
    pipeline = models.Pipeline.create()
    job = models.Job.create(pipeline_id=pipeline.id)
    worker_params = dict([(p.name, p.val) for p in job.params])
    self.assertTrue(pipeline.get_ready())
    self.assertEqual(job.get_status(), models.Job.STATUS.WAITING)
    task1 = job.start()
    self.assertIsNotNone(task1)
    self.assertEqual(job.get_status(), models.Job.STATUS.RUNNING)
    task2 = job.enqueue(job.worker_class, worker_params)
    task3 = job.enqueue(job.worker_class, worker_params)
    self.assertIsNotNone(task2)
    self.assertIsNotNone(task3)
    job.task_succeeded(task1.name)
    self.assertEqual(job.get_status(), models.Job.STATUS.RUNNING)
    job.task_succeeded(task3.name)
    self.assertEqual(job.get_status(), models.Job.STATUS.RUNNING)
    job.task_succeeded(task2.name)
    self.assertEqual(job.get_status(), models.Job.STATUS.SUCCEEDED)

  def test_succeeds_completing_tasks_with_multiple_memcache_clients(self):
    """Task tracking survives completions reported from other processes."""
    pipeline = models.Pipeline.create()
    job = models.Job.create(pipeline_id=pipeline.id)
    worker_params = dict([(p.name, p.val) for p in job.params])
    self.assertTrue(pipeline.get_ready())
    self.assertEqual(job.get_status(), models.Job.STATUS.WAITING)
    task1 = job.start()
    self.assertIsNotNone(task1)
    self.assertEqual(job.get_status(), models.Job.STATUS.RUNNING)
    # Simulates that the task will complete from another process/machine.
    cache.clear_memcache_client()
    job = models.Job.find(job.id)  # refresh the job entity
    task2 = job.enqueue(job.worker_class, worker_params)
    task3 = job.enqueue(job.worker_class, worker_params)
    self.assertIsNotNone(task2)
    self.assertIsNotNone(task3)
    job.task_succeeded(task1.name)
    # Simulates that the task will complete from another process/machine.
    cache.clear_memcache_client()
    job = models.Job.find(job.id)  # refresh the job entity
    self.assertEqual(job.get_status(), models.Job.STATUS.RUNNING)
    job.task_succeeded(task3.name)
    self.assertEqual(job.get_status(), models.Job.STATUS.RUNNING)
    # Simulates that the task will complete from another process/machine.
    cache.clear_memcache_client()
    job = models.Job.find(job.id)  # refresh the job entity
    job.task_succeeded(task2.name)
    self.assertEqual(job.get_status(), models.Job.STATUS.SUCCEEDED)
| 41.24023
| 92
| 0.738092
| 4,582
| 35,879
| 5.625491
| 0.053907
| 0.072975
| 0.07216
| 0.101024
| 0.895019
| 0.869219
| 0.823208
| 0.802995
| 0.77933
| 0.759505
| 0
| 0.012509
| 0.13997
| 35,879
| 869
| 93
| 41.287687
| 0.822828
| 0.036205
| 0
| 0.761194
| 0
| 0
| 0.010623
| 0.001939
| 0
| 0
| 0
| 0
| 0.305292
| 1
| 0.096337
| false
| 0
| 0.014925
| 0
| 0.12483
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d784168dac88b85eacedf7bc81a50fb3bf6ae2ca
| 196
|
py
|
Python
|
built-in/TensorFlow/Official/nlp/Transformer_for_TensorFlow/noahnmt/decoders/__init__.py
|
Huawei-Ascend/modelzoo
|
df51ed9c1d6dbde1deef63f2a037a369f8554406
|
[
"Apache-2.0"
] | null | null | null |
built-in/TensorFlow/Official/nlp/Transformer_for_TensorFlow/noahnmt/decoders/__init__.py
|
Huawei-Ascend/modelzoo
|
df51ed9c1d6dbde1deef63f2a037a369f8554406
|
[
"Apache-2.0"
] | 3
|
2021-03-31T20:15:40.000Z
|
2022-02-09T23:50:46.000Z
|
built-in/TensorFlow/Official/nlp/Transformer_for_TensorFlow/noahnmt/decoders/__init__.py
|
Huawei-Ascend/modelzoo
|
df51ed9c1d6dbde1deef63f2a037a369f8554406
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# Copyright Huawei Noah's Ark Lab.
from noahnmt.decoders import attention_decoder
from noahnmt.decoders import beam_search_decoder
from noahnmt.decoders import transformer_decoder
| 28
| 48
| 0.846939
| 28
| 196
| 5.785714
| 0.642857
| 0.203704
| 0.351852
| 0.462963
| 0.395062
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005714
| 0.107143
| 196
| 6
| 49
| 32.666667
| 0.92
| 0.229592
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
ad06a083b65ff952a6f645b56ae1d8a2a566a546
| 42
|
py
|
Python
|
tests/test_psdm_qs_cli.py
|
teddyrendahl/psdm_qs_cli
|
3b693932f64daa948319f24441a326920a7a7f08
|
[
"MIT"
] | null | null | null |
tests/test_psdm_qs_cli.py
|
teddyrendahl/psdm_qs_cli
|
3b693932f64daa948319f24441a326920a7a7f08
|
[
"MIT"
] | null | null | null |
tests/test_psdm_qs_cli.py
|
teddyrendahl/psdm_qs_cli
|
3b693932f64daa948319f24441a326920a7a7f08
|
[
"MIT"
] | null | null | null |
def test_import():
    """Smoke test: the psdm_qs_cli package can be imported without error."""
    import psdm_qs_cli
| 14
| 22
| 0.738095
| 7
| 42
| 4
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 42
| 2
| 23
| 21
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 1
| 0
| 1.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ad1562186d9b68e5f46211db9be82f2787739647
| 17,161
|
py
|
Python
|
src/scripts/data_processing/combine_ts_fresh_features.py
|
arnabbiswas1/k_tab_aug_muticlass_rmse_logloss_weightedf1_stratified_tsfresh_cesium
|
13db3cb9d0b2f25181ccf4b1316e12425abfc276
|
[
"Apache-2.0"
] | null | null | null |
src/scripts/data_processing/combine_ts_fresh_features.py
|
arnabbiswas1/k_tab_aug_muticlass_rmse_logloss_weightedf1_stratified_tsfresh_cesium
|
13db3cb9d0b2f25181ccf4b1316e12425abfc276
|
[
"Apache-2.0"
] | null | null | null |
src/scripts/data_processing/combine_ts_fresh_features.py
|
arnabbiswas1/k_tab_aug_muticlass_rmse_logloss_weightedf1_stratified_tsfresh_cesium
|
13db3cb9d0b2f25181ccf4b1316e12425abfc276
|
[
"Apache-2.0"
] | null | null | null |
"""
Script to combine the features generated for tsfresh and write the
combined DF back to disk
"""
from src import common
import pandas as pd
import src.config.constants as constants
def select_features(logger, df, features_to_drop):
    """Drop unwanted feature columns from a DataFrame.

    Args:
        logger: logger used to report the DataFrame shape before and after.
        df: input features DataFrame.
        features_to_drop: list of column names to remove.

    Returns:
        A new DataFrame without the dropped columns.
    """
    logger.info(f"Shape of the features {df.shape}")
    pruned = df.drop(features_to_drop, axis=1)
    logger.info(f"Shape of the features after dropping {pruned.shape}")
    return pruned
def load_data(logger, name, features_to_drop):
    """Load a cast tsfresh feature set from disk and drop unwanted columns.

    Args:
        logger: logger used to report shapes before and after dropping.
        name: base name of the feature set; the parquet file is read from
            `{constants.FEATURES_DATA_DIR}/cast/{name}_cast.parquet`.
        features_to_drop: list of column names to remove.

    Returns:
        The loaded DataFrame with `features_to_drop` removed.
    """
    df = pd.read_parquet(f"{constants.FEATURES_DATA_DIR}/cast/{name}_cast.parquet")
    logger.info(f"Shape of {name} before dropping {df.shape}")
    # Bug fix: select_features expects (logger, df, features_to_drop); the
    # original call omitted `logger`, shifting every argument by one and
    # raising a TypeError at runtime. Also fixed the "droipping" log typo.
    df = select_features(logger, df, features_to_drop)
    logger.info(f"Shape of {name} after dropping {df.shape}")
    return df
def combine_features(logger):
name = "mixed_1_set"
features_to_drop = [
"loan__has_duplicate_min",
"loan__length",
"loan__sample_entropy",
]
df_mixed_1_set = load_data(logger, name, features_to_drop)
name = "symmetry_large_std_quantile_set"
features_to_drop = features_to_drop = [
"loan__symmetry_looking__r_0.0",
"loan__symmetry_looking__r_0.1",
"loan__symmetry_looking__r_0.15000000000000002",
"loan__symmetry_looking__r_0.2",
"loan__symmetry_looking__r_0.25",
"loan__symmetry_looking__r_0.30000000000000004",
"loan__symmetry_looking__r_0.35000000000000003",
"loan__symmetry_looking__r_0.4",
"loan__symmetry_looking__r_0.45",
"loan__symmetry_looking__r_0.5",
"loan__symmetry_looking__r_0.55",
"loan__symmetry_looking__r_0.6000000000000001",
"loan__symmetry_looking__r_0.65",
"loan__symmetry_looking__r_0.7000000000000001",
"loan__symmetry_looking__r_0.75",
"loan__symmetry_looking__r_0.8",
"loan__symmetry_looking__r_0.8500000000000001",
"loan__symmetry_looking__r_0.9",
"loan__symmetry_looking__r_0.9500000000000001",
"loan__large_standard_deviation__r_0.05",
"loan__large_standard_deviation__r_0.1",
"loan__large_standard_deviation__r_0.15000000000000002",
"loan__large_standard_deviation__r_0.30000000000000004",
"loan__large_standard_deviation__r_0.35000000000000003",
"loan__large_standard_deviation__r_0.4",
"loan__large_standard_deviation__r_0.45",
"loan__large_standard_deviation__r_0.5",
"loan__large_standard_deviation__r_0.55",
"loan__large_standard_deviation__r_0.6000000000000001",
"loan__large_standard_deviation__r_0.65",
"loan__large_standard_deviation__r_0.7000000000000001",
"loan__large_standard_deviation__r_0.75",
"loan__large_standard_deviation__r_0.8",
"loan__large_standard_deviation__r_0.8500000000000001",
"loan__large_standard_deviation__r_0.9",
"loan__large_standard_deviation__r_0.9500000000000001",
]
df_sym = load_data(logger, name, features_to_drop)
name = "acf_pacf_set"
features_to_drop = ["loan__partial_autocorrelation__lag_0"]
df_acf_pacf_set = load_data(logger, name, features_to_drop)
name = "cwt_coeff_set"
features_to_drop = []
df_cwt_coeff_set = load_data(logger, name, features_to_drop)
name = "change_quantile_set"
features_to_drop = []
df_change_quantile_set = load_data(logger, name, features_to_drop)
name = "liner_agg_linear_set"
features_to_drop = [
"loan__agg_linear_trend__attr_stderr__chunk_len_50__f_agg_max",
"loan__agg_linear_trend__attr_stderr__chunk_len_50__f_agg_min",
"loan__agg_linear_trend__attr_stderr__chunk_len_50__f_agg_mean",
"loan__agg_linear_trend__attr_stderr__chunk_len_50__f_agg_var",
]
df_liner_agg_linear_set = load_data(logger, name, features_to_drop)
name = "mixed_2_set"
features_to_drop = [
"loan__count_above__t_0",
"loan__query_similarity_count__query_None__threshold_00",
"loan__matrix_profile__feature_min__threshold_098",
"loan__matrix_profile__feature_max__threshold_098",
"loan__matrix_profile__feature_mean__threshold_098",
"loan__matrix_profile__feature_median__threshold_098",
"loan__matrix_profile__feature_25__threshold_098",
"loan__matrix_profile__feature_75__threshold_098",
]
df_mixed_2_set = load_data(logger, name, features_to_drop)
name = "mixed_3_set"
features_to_drop = []
df_mixed_3_set = load_data(logger, name, features_to_drop)
name = "mixed_4_set"
features_to_drop = [
"loan__value_count__value_minus1",
"loan__range_count__max_0__min_10000000000000",
"loan__range_count__max_10000000000000__min_0",
"loan__number_crossing_m__m_minus1",
"loan__ratio_beyond_r_sigma__r_5",
"loan__ratio_beyond_r_sigma__r_6",
"loan__ratio_beyond_r_sigma__r_7",
"loan__ratio_beyond_r_sigma__r_10",
]
df_mixed_4_set = load_data(logger, name, features_to_drop)
name = "fft_real_set"
features_to_drop = [
"loan__fft_coefficient__attr_real__coeff_51",
"loan__fft_coefficient__attr_real__coeff_52",
"loan__fft_coefficient__attr_real__coeff_53",
"loan__fft_coefficient__attr_real__coeff_54",
"loan__fft_coefficient__attr_real__coeff_55",
"loan__fft_coefficient__attr_real__coeff_56",
"loan__fft_coefficient__attr_real__coeff_57",
"loan__fft_coefficient__attr_real__coeff_58",
"loan__fft_coefficient__attr_real__coeff_59",
"loan__fft_coefficient__attr_real__coeff_60",
"loan__fft_coefficient__attr_real__coeff_61",
"loan__fft_coefficient__attr_real__coeff_62",
"loan__fft_coefficient__attr_real__coeff_63",
"loan__fft_coefficient__attr_real__coeff_64",
"loan__fft_coefficient__attr_real__coeff_65",
"loan__fft_coefficient__attr_real__coeff_66",
"loan__fft_coefficient__attr_real__coeff_67",
"loan__fft_coefficient__attr_real__coeff_68",
"loan__fft_coefficient__attr_real__coeff_69",
"loan__fft_coefficient__attr_real__coeff_70",
"loan__fft_coefficient__attr_real__coeff_71",
"loan__fft_coefficient__attr_real__coeff_72",
"loan__fft_coefficient__attr_real__coeff_73",
"loan__fft_coefficient__attr_real__coeff_74",
"loan__fft_coefficient__attr_real__coeff_75",
"loan__fft_coefficient__attr_real__coeff_76",
"loan__fft_coefficient__attr_real__coeff_77",
"loan__fft_coefficient__attr_real__coeff_78",
"loan__fft_coefficient__attr_real__coeff_79",
"loan__fft_coefficient__attr_real__coeff_80",
"loan__fft_coefficient__attr_real__coeff_81",
"loan__fft_coefficient__attr_real__coeff_82",
"loan__fft_coefficient__attr_real__coeff_83",
"loan__fft_coefficient__attr_real__coeff_84",
"loan__fft_coefficient__attr_real__coeff_85",
"loan__fft_coefficient__attr_real__coeff_86",
"loan__fft_coefficient__attr_real__coeff_87",
"loan__fft_coefficient__attr_real__coeff_88",
"loan__fft_coefficient__attr_real__coeff_89",
"loan__fft_coefficient__attr_real__coeff_90",
"loan__fft_coefficient__attr_real__coeff_91",
"loan__fft_coefficient__attr_real__coeff_92",
"loan__fft_coefficient__attr_real__coeff_93",
"loan__fft_coefficient__attr_real__coeff_94",
"loan__fft_coefficient__attr_real__coeff_95",
"loan__fft_coefficient__attr_real__coeff_96",
"loan__fft_coefficient__attr_real__coeff_97",
"loan__fft_coefficient__attr_real__coeff_98",
"loan__fft_coefficient__attr_real__coeff_99",
]
df_fft_real_set = load_data(logger, name, features_to_drop)
name = "fft_imag_set"
features_to_drop = [
"loan__fft_coefficient__attr_imag__coeff_0",
"loan__fft_coefficient__attr_imag__coeff_50",
"loan__fft_coefficient__attr_imag__coeff_51",
"loan__fft_coefficient__attr_imag__coeff_52",
"loan__fft_coefficient__attr_imag__coeff_53",
"loan__fft_coefficient__attr_imag__coeff_54",
"loan__fft_coefficient__attr_imag__coeff_55",
"loan__fft_coefficient__attr_imag__coeff_56",
"loan__fft_coefficient__attr_imag__coeff_57",
"loan__fft_coefficient__attr_imag__coeff_58",
"loan__fft_coefficient__attr_imag__coeff_59",
"loan__fft_coefficient__attr_imag__coeff_60",
"loan__fft_coefficient__attr_imag__coeff_61",
"loan__fft_coefficient__attr_imag__coeff_62",
"loan__fft_coefficient__attr_imag__coeff_63",
"loan__fft_coefficient__attr_imag__coeff_64",
"loan__fft_coefficient__attr_imag__coeff_65",
"loan__fft_coefficient__attr_imag__coeff_66",
"loan__fft_coefficient__attr_imag__coeff_67",
"loan__fft_coefficient__attr_imag__coeff_68",
"loan__fft_coefficient__attr_imag__coeff_69",
"loan__fft_coefficient__attr_imag__coeff_70",
"loan__fft_coefficient__attr_imag__coeff_71",
"loan__fft_coefficient__attr_imag__coeff_72",
"loan__fft_coefficient__attr_imag__coeff_73",
"loan__fft_coefficient__attr_imag__coeff_74",
"loan__fft_coefficient__attr_imag__coeff_75",
"loan__fft_coefficient__attr_imag__coeff_76",
"loan__fft_coefficient__attr_imag__coeff_77",
"loan__fft_coefficient__attr_imag__coeff_78",
"loan__fft_coefficient__attr_imag__coeff_79",
"loan__fft_coefficient__attr_imag__coeff_80",
"loan__fft_coefficient__attr_imag__coeff_81",
"loan__fft_coefficient__attr_imag__coeff_82",
"loan__fft_coefficient__attr_imag__coeff_83",
"loan__fft_coefficient__attr_imag__coeff_84",
"loan__fft_coefficient__attr_imag__coeff_85",
"loan__fft_coefficient__attr_imag__coeff_86",
"loan__fft_coefficient__attr_imag__coeff_87",
"loan__fft_coefficient__attr_imag__coeff_88",
"loan__fft_coefficient__attr_imag__coeff_89",
"loan__fft_coefficient__attr_imag__coeff_90",
"loan__fft_coefficient__attr_imag__coeff_91",
"loan__fft_coefficient__attr_imag__coeff_92",
"loan__fft_coefficient__attr_imag__coeff_93",
"loan__fft_coefficient__attr_imag__coeff_94",
"loan__fft_coefficient__attr_imag__coeff_95",
"loan__fft_coefficient__attr_imag__coeff_96",
"loan__fft_coefficient__attr_imag__coeff_97",
"loan__fft_coefficient__attr_imag__coeff_98",
"loan__fft_coefficient__attr_imag__coeff_99",
]
df_fft_imag_set = load_data(logger, name, features_to_drop)
name = "fft_abs_set"
features_to_drop = [
"loan__fft_coefficient__attr_abs__coeff_51",
"loan__fft_coefficient__attr_abs__coeff_52",
"loan__fft_coefficient__attr_abs__coeff_53",
"loan__fft_coefficient__attr_abs__coeff_54",
"loan__fft_coefficient__attr_abs__coeff_55",
"loan__fft_coefficient__attr_abs__coeff_56",
"loan__fft_coefficient__attr_abs__coeff_57",
"loan__fft_coefficient__attr_abs__coeff_58",
"loan__fft_coefficient__attr_abs__coeff_59",
"loan__fft_coefficient__attr_abs__coeff_60",
"loan__fft_coefficient__attr_abs__coeff_61",
"loan__fft_coefficient__attr_abs__coeff_62",
"loan__fft_coefficient__attr_abs__coeff_63",
"loan__fft_coefficient__attr_abs__coeff_64",
"loan__fft_coefficient__attr_abs__coeff_65",
"loan__fft_coefficient__attr_abs__coeff_66",
"loan__fft_coefficient__attr_abs__coeff_67",
"loan__fft_coefficient__attr_abs__coeff_68",
"loan__fft_coefficient__attr_abs__coeff_69",
"loan__fft_coefficient__attr_abs__coeff_70",
"loan__fft_coefficient__attr_abs__coeff_71",
"loan__fft_coefficient__attr_abs__coeff_72",
"loan__fft_coefficient__attr_abs__coeff_73",
"loan__fft_coefficient__attr_abs__coeff_74",
"loan__fft_coefficient__attr_abs__coeff_75",
"loan__fft_coefficient__attr_abs__coeff_76",
"loan__fft_coefficient__attr_abs__coeff_77",
"loan__fft_coefficient__attr_abs__coeff_78",
"loan__fft_coefficient__attr_abs__coeff_79",
"loan__fft_coefficient__attr_abs__coeff_80",
"loan__fft_coefficient__attr_abs__coeff_81",
"loan__fft_coefficient__attr_abs__coeff_82",
"loan__fft_coefficient__attr_abs__coeff_83",
"loan__fft_coefficient__attr_abs__coeff_84",
"loan__fft_coefficient__attr_abs__coeff_85",
"loan__fft_coefficient__attr_abs__coeff_86",
"loan__fft_coefficient__attr_abs__coeff_87",
"loan__fft_coefficient__attr_abs__coeff_88",
"loan__fft_coefficient__attr_abs__coeff_89",
"loan__fft_coefficient__attr_abs__coeff_90",
"loan__fft_coefficient__attr_abs__coeff_91",
"loan__fft_coefficient__attr_abs__coeff_92",
"loan__fft_coefficient__attr_abs__coeff_93",
"loan__fft_coefficient__attr_abs__coeff_94",
"loan__fft_coefficient__attr_abs__coeff_95",
"loan__fft_coefficient__attr_abs__coeff_96",
"loan__fft_coefficient__attr_abs__coeff_97",
"loan__fft_coefficient__attr_abs__coeff_98",
"loan__fft_coefficient__attr_abs__coeff_99",
]
df_fft_abs_set = load_data(logger, name, features_to_drop)
name = "fft_angle_set"
features_to_drop = [
"loan__fft_coefficient__attr_angle__coeff_0",
"loan__fft_coefficient__attr_angle__coeff_51",
"loan__fft_coefficient__attr_angle__coeff_52",
"loan__fft_coefficient__attr_angle__coeff_53",
"loan__fft_coefficient__attr_angle__coeff_54",
"loan__fft_coefficient__attr_angle__coeff_55",
"loan__fft_coefficient__attr_angle__coeff_56",
"loan__fft_coefficient__attr_angle__coeff_57",
"loan__fft_coefficient__attr_angle__coeff_58",
"loan__fft_coefficient__attr_angle__coeff_59",
"loan__fft_coefficient__attr_angle__coeff_60",
"loan__fft_coefficient__attr_angle__coeff_61",
"loan__fft_coefficient__attr_angle__coeff_62",
"loan__fft_coefficient__attr_angle__coeff_63",
"loan__fft_coefficient__attr_angle__coeff_64",
"loan__fft_coefficient__attr_angle__coeff_65",
"loan__fft_coefficient__attr_angle__coeff_66",
"loan__fft_coefficient__attr_angle__coeff_67",
"loan__fft_coefficient__attr_angle__coeff_68",
"loan__fft_coefficient__attr_angle__coeff_69",
"loan__fft_coefficient__attr_angle__coeff_70",
"loan__fft_coefficient__attr_angle__coeff_71",
"loan__fft_coefficient__attr_angle__coeff_72",
"loan__fft_coefficient__attr_angle__coeff_73",
"loan__fft_coefficient__attr_angle__coeff_74",
"loan__fft_coefficient__attr_angle__coeff_75",
"loan__fft_coefficient__attr_angle__coeff_76",
"loan__fft_coefficient__attr_angle__coeff_77",
"loan__fft_coefficient__attr_angle__coeff_78",
"loan__fft_coefficient__attr_angle__coeff_79",
"loan__fft_coefficient__attr_angle__coeff_80",
"loan__fft_coefficient__attr_angle__coeff_81",
"loan__fft_coefficient__attr_angle__coeff_82",
"loan__fft_coefficient__attr_angle__coeff_83",
"loan__fft_coefficient__attr_angle__coeff_84",
"loan__fft_coefficient__attr_angle__coeff_85",
"loan__fft_coefficient__attr_angle__coeff_86",
"loan__fft_coefficient__attr_angle__coeff_87",
"loan__fft_coefficient__attr_angle__coeff_88",
"loan__fft_coefficient__attr_angle__coeff_89",
"loan__fft_coefficient__attr_angle__coeff_90",
"loan__fft_coefficient__attr_angle__coeff_91",
"loan__fft_coefficient__attr_angle__coeff_92",
"loan__fft_coefficient__attr_angle__coeff_93",
"loan__fft_coefficient__attr_angle__coeff_94",
"loan__fft_coefficient__attr_angle__coeff_95",
"loan__fft_coefficient__attr_angle__coeff_96",
"loan__fft_coefficient__attr_angle__coeff_97",
"loan__fft_coefficient__attr_angle__coeff_98",
"loan__fft_coefficient__attr_angle__coeff_99",
]
df_fft_angle_set = load_data(logger, name, features_to_drop)
dfs = [
df_acf_pacf_set,
df_change_quantile_set,
df_cwt_coeff_set,
df_fft_abs_set,
df_fft_angle_set,
df_fft_imag_set,
df_fft_real_set,
df_liner_agg_linear_set,
df_mixed_1_set,
df_mixed_2_set,
df_mixed_3_set,
df_mixed_4_set,
df_sym,
]
result_df = pd.concat(dfs, axis=1)
logger.info(f"Shape of the combined Data Frame {result_df.shape}")
return result_df
if __name__ == "__main__":
    # Script entry point: build the merged tsfresh feature set and write it
    # out as a single parquet file.
    # Create a Stream only logger
    logger = common.get_logger("generate_features")
    logger.info("Starting to generate features")
    # combine_features concatenates the per-feature-group DataFrames
    # (loaded with their listed columns dropped) column-wise.
    results_df = combine_features(logger=logger)
    logger.info(
        f"Writing the combined parquet to {constants.FEATURES_DATA_DIR}/cast/tsfresh_f_merged.parquet"
    )
    # index=True preserves the DataFrame index in the parquet output.
    results_df.to_parquet(
        f"{constants.FEATURES_DATA_DIR}/cast/tsfresh_f_merged.parquet", index=True
    )
| 44.924084
| 102
| 0.757823
| 2,290
| 17,161
| 4.611354
| 0.0869
| 0.131913
| 0.339205
| 0.414583
| 0.858239
| 0.795644
| 0.110227
| 0.101231
| 0.074811
| 0.044318
| 0
| 0.055182
| 0.177379
| 17,161
| 381
| 103
| 45.041995
| 0.692853
| 0.006993
| 0
| 0.037249
| 1
| 0
| 0.669113
| 0.640463
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008596
| false
| 0
| 0.008596
| 0
| 0.025788
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ad5e794854817e95b07ec1b47f801079caf75efc
| 36
|
py
|
Python
|
tests/sample.py
|
VoshVolk/public_python
|
8480f8220531534268f42449c66ebbfd3011bf6e
|
[
"Apache-2.0"
] | 1
|
2021-11-08T08:09:29.000Z
|
2021-11-08T08:09:29.000Z
|
tests/sample.py
|
VoshVolk/public_python
|
8480f8220531534268f42449c66ebbfd3011bf6e
|
[
"Apache-2.0"
] | null | null | null |
tests/sample.py
|
VoshVolk/public_python
|
8480f8220531534268f42449c66ebbfd3011bf6e
|
[
"Apache-2.0"
] | null | null | null |
def first_entry():
    """Return the constant string ``"a"``."""
    result = "a"
    return result
| 7.2
| 18
| 0.583333
| 5
| 36
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.277778
| 36
| 5
| 19
| 7.2
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0.029412
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
ad73b98e5937ee7641afdacd13e0f2d6675bd2e5
| 2,291
|
py
|
Python
|
pynncml/metrics/regression.py
|
haihabi/PyNNcml
|
808892da798913928fbc219cbb5f9e41156d9d49
|
[
"MIT"
] | 4
|
2020-06-28T22:52:19.000Z
|
2021-10-31T10:19:51.000Z
|
pynncml/metrics/regression.py
|
haihabi/PyNNcml
|
808892da798913928fbc219cbb5f9e41156d9d49
|
[
"MIT"
] | null | null | null |
pynncml/metrics/regression.py
|
haihabi/PyNNcml
|
808892da798913928fbc219cbb5f9e41156d9d49
|
[
"MIT"
] | 4
|
2020-06-28T22:52:24.000Z
|
2021-11-04T00:28:42.000Z
|
import numpy as np
def mse(input_array: np.ndarray, reference_array: np.ndarray) -> float:
    r"""
    Compute the mean square error (MSE) of a prediction array.

    .. math::
        mse=\frac{1}{N}\sum_i^N (p_i-r_i)^2

    where mse is the mean square error measurement, p is the prediction
    array and r is the reference array.

    Note: the reference array shape must be equal to the input array shape.

    :param input_array: A numpy array of any shape.
    :param reference_array: A numpy array of the same shape as ``input_array``.
    :return: A floating point number representing the mean square error.
    """
    squared_error = np.square(input_array - reference_array)
    return float(squared_error.mean())
def nmse(input_array: np.ndarray, reference_array: np.ndarray, epsilon: float = 0.00001) -> float:
    r"""
    Compute the normalized mean square error (NMSE) of a prediction array.

    .. math::
        nmse=\frac{1}{N}\sum_i^N \frac{(p_i-r_i)^2}{r_i^2+\epsilon}

    where nmse is the normalized mean square error measurement, p is the
    prediction array, r is the reference array and epsilon is a floating
    point number for numeric stability.

    Note: the reference array shape must be equal to the input array shape.

    :param input_array: A numpy array of any shape.
    :param reference_array: A numpy array of the same shape as ``input_array``.
    :param epsilon: A floating point number for numeric stability.
    :return: A floating point number representing the normalized mean square error.
    """
    squared_error = np.square(input_array - reference_array)
    normalizer = np.square(reference_array) + epsilon
    return float(np.mean(squared_error / normalizer))
def rmse(input_array: np.ndarray, reference_array: np.ndarray) -> float:
    r"""
    Compute the root mean square error (RMSE) of a prediction array.

    .. math::
        rmse=\sqrt{\frac{1}{N}\sum_i^N (p_i-r_i)^2}

    where rmse is the root mean square error measurement, p is the
    prediction array and r is the reference array.

    Note: the reference array shape must be equal to the input array shape.

    :param input_array: A numpy array of any shape.
    :param reference_array: A numpy array of the same shape as ``input_array``.
    :return: A floating point number representing the root mean square error.
    """
    # Same computation as before: sqrt of the element-wise squared-error mean.
    return float(np.sqrt(np.mean(np.power(input_array - reference_array, 2))))
| 44.057692
| 116
| 0.709734
| 362
| 2,291
| 4.411602
| 0.165746
| 0.140263
| 0.084534
| 0.067627
| 0.872887
| 0.834064
| 0.812774
| 0.789606
| 0.733876
| 0.710081
| 0
| 0.009377
| 0.208643
| 2,291
| 51
| 117
| 44.921569
| 0.871484
| 0.700131
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.3
| false
| 0
| 0.1
| 0
| 0.7
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
d159bb5b3249c9ecab80c7d7f8d8e48ce50482d4
| 28,733
|
py
|
Python
|
sdk/python/pulumi_github/repository_pull_request.py
|
pulumi/pulumi-github
|
303ed7a28cbfe6ba1db75b3b365dcfa0b00e6e91
|
[
"ECL-2.0",
"Apache-2.0"
] | 20
|
2020-04-27T15:05:01.000Z
|
2022-02-08T00:28:32.000Z
|
sdk/python/pulumi_github/repository_pull_request.py
|
pulumi/pulumi-github
|
303ed7a28cbfe6ba1db75b3b365dcfa0b00e6e91
|
[
"ECL-2.0",
"Apache-2.0"
] | 103
|
2020-05-01T17:36:32.000Z
|
2022-03-31T15:26:35.000Z
|
sdk/python/pulumi_github/repository_pull_request.py
|
pulumi/pulumi-github
|
303ed7a28cbfe6ba1db75b3b365dcfa0b00e6e91
|
[
"ECL-2.0",
"Apache-2.0"
] | 4
|
2020-06-24T19:15:02.000Z
|
2021-11-26T08:05:46.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['RepositoryPullRequestArgs', 'RepositoryPullRequest']
@pulumi.input_type
class RepositoryPullRequestArgs:
    """Input-argument bag for constructing a ``RepositoryPullRequest`` resource.

    NOTE(review): this class is tool-generated (Pulumi Terraform Bridge per the
    file header); avoid hand edits beyond documentation.
    """
    def __init__(__self__, *,
                 base_ref: pulumi.Input[str],
                 base_repository: pulumi.Input[str],
                 head_ref: pulumi.Input[str],
                 title: pulumi.Input[str],
                 body: Optional[pulumi.Input[str]] = None,
                 maintainer_can_modify: Optional[pulumi.Input[bool]] = None,
                 owner: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a RepositoryPullRequest resource.
        :param pulumi.Input[str] base_ref: Name of the branch serving as the base of the Pull Request.
        :param pulumi.Input[str] base_repository: Name of the base repository to retrieve the Pull Requests from.
        :param pulumi.Input[str] head_ref: Name of the branch serving as the head of the Pull Request.
        :param pulumi.Input[str] title: The title of the Pull Request.
        :param pulumi.Input[str] body: Body of the Pull Request.
        :param pulumi.Input[bool] maintainer_can_modify: Controls whether the base repository maintainers can modify the Pull Request. Default: false.
        :param pulumi.Input[str] owner: Owner of the repository. If not provided, the provider's default owner is used.
        """
        pulumi.set(__self__, "base_ref", base_ref)
        pulumi.set(__self__, "base_repository", base_repository)
        pulumi.set(__self__, "head_ref", head_ref)
        pulumi.set(__self__, "title", title)
        # Optional inputs are only recorded when explicitly supplied.
        if body is not None:
            pulumi.set(__self__, "body", body)
        if maintainer_can_modify is not None:
            pulumi.set(__self__, "maintainer_can_modify", maintainer_can_modify)
        if owner is not None:
            pulumi.set(__self__, "owner", owner)

    @property
    @pulumi.getter(name="baseRef")
    def base_ref(self) -> pulumi.Input[str]:
        """
        Name of the branch serving as the base of the Pull Request.
        """
        return pulumi.get(self, "base_ref")

    @base_ref.setter
    def base_ref(self, value: pulumi.Input[str]):
        pulumi.set(self, "base_ref", value)

    @property
    @pulumi.getter(name="baseRepository")
    def base_repository(self) -> pulumi.Input[str]:
        """
        Name of the base repository to retrieve the Pull Requests from.
        """
        return pulumi.get(self, "base_repository")

    @base_repository.setter
    def base_repository(self, value: pulumi.Input[str]):
        pulumi.set(self, "base_repository", value)

    @property
    @pulumi.getter(name="headRef")
    def head_ref(self) -> pulumi.Input[str]:
        """
        Name of the branch serving as the head of the Pull Request.
        """
        return pulumi.get(self, "head_ref")

    @head_ref.setter
    def head_ref(self, value: pulumi.Input[str]):
        pulumi.set(self, "head_ref", value)

    @property
    @pulumi.getter
    def title(self) -> pulumi.Input[str]:
        """
        The title of the Pull Request.
        """
        return pulumi.get(self, "title")

    @title.setter
    def title(self, value: pulumi.Input[str]):
        pulumi.set(self, "title", value)

    @property
    @pulumi.getter
    def body(self) -> Optional[pulumi.Input[str]]:
        """
        Body of the Pull Request.
        """
        return pulumi.get(self, "body")

    @body.setter
    def body(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "body", value)

    @property
    @pulumi.getter(name="maintainerCanModify")
    def maintainer_can_modify(self) -> Optional[pulumi.Input[bool]]:
        """
        Controls whether the base repository maintainers can modify the Pull Request. Default: false.
        """
        return pulumi.get(self, "maintainer_can_modify")

    @maintainer_can_modify.setter
    def maintainer_can_modify(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "maintainer_can_modify", value)

    @property
    @pulumi.getter
    def owner(self) -> Optional[pulumi.Input[str]]:
        """
        Owner of the repository. If not provided, the provider's default owner is used.
        """
        return pulumi.get(self, "owner")

    @owner.setter
    def owner(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "owner", value)
@pulumi.input_type
class _RepositoryPullRequestState:
    """State bag for looking up / filtering existing ``RepositoryPullRequest``
    resources; every field is optional.

    NOTE(review): tool-generated (Pulumi Terraform Bridge per the file header);
    avoid hand edits beyond documentation.
    """
    def __init__(__self__, *,
                 base_ref: Optional[pulumi.Input[str]] = None,
                 base_repository: Optional[pulumi.Input[str]] = None,
                 base_sha: Optional[pulumi.Input[str]] = None,
                 body: Optional[pulumi.Input[str]] = None,
                 draft: Optional[pulumi.Input[bool]] = None,
                 head_ref: Optional[pulumi.Input[str]] = None,
                 head_sha: Optional[pulumi.Input[str]] = None,
                 labels: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 maintainer_can_modify: Optional[pulumi.Input[bool]] = None,
                 number: Optional[pulumi.Input[int]] = None,
                 opened_at: Optional[pulumi.Input[int]] = None,
                 opened_by: Optional[pulumi.Input[str]] = None,
                 owner: Optional[pulumi.Input[str]] = None,
                 state: Optional[pulumi.Input[str]] = None,
                 title: Optional[pulumi.Input[str]] = None,
                 updated_at: Optional[pulumi.Input[int]] = None):
        """
        Input properties used for looking up and filtering RepositoryPullRequest resources.
        :param pulumi.Input[str] base_ref: Name of the branch serving as the base of the Pull Request.
        :param pulumi.Input[str] base_repository: Name of the base repository to retrieve the Pull Requests from.
        :param pulumi.Input[str] base_sha: Head commit SHA of the Pull Request base.
        :param pulumi.Input[str] body: Body of the Pull Request.
        :param pulumi.Input[bool] draft: Indicates Whether this Pull Request is a draft.
        :param pulumi.Input[str] head_ref: Name of the branch serving as the head of the Pull Request.
        :param pulumi.Input[str] head_sha: Head commit SHA of the Pull Request head.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] labels: List of label names set on the Pull Request.
        :param pulumi.Input[bool] maintainer_can_modify: Controls whether the base repository maintainers can modify the Pull Request. Default: false.
        :param pulumi.Input[int] number: The number of the Pull Request within the repository.
        :param pulumi.Input[int] opened_at: Unix timestamp indicating the Pull Request creation time.
        :param pulumi.Input[str] opened_by: GitHub login of the user who opened the Pull Request.
        :param pulumi.Input[str] owner: Owner of the repository. If not provided, the provider's default owner is used.
        :param pulumi.Input[str] state: the current Pull Request state - can be "open", "closed" or "merged".
        :param pulumi.Input[str] title: The title of the Pull Request.
        :param pulumi.Input[int] updated_at: The timestamp of the last Pull Request update.
        """
        # Every field is optional; only explicitly supplied values are stored.
        if base_ref is not None:
            pulumi.set(__self__, "base_ref", base_ref)
        if base_repository is not None:
            pulumi.set(__self__, "base_repository", base_repository)
        if base_sha is not None:
            pulumi.set(__self__, "base_sha", base_sha)
        if body is not None:
            pulumi.set(__self__, "body", body)
        if draft is not None:
            pulumi.set(__self__, "draft", draft)
        if head_ref is not None:
            pulumi.set(__self__, "head_ref", head_ref)
        if head_sha is not None:
            pulumi.set(__self__, "head_sha", head_sha)
        if labels is not None:
            pulumi.set(__self__, "labels", labels)
        if maintainer_can_modify is not None:
            pulumi.set(__self__, "maintainer_can_modify", maintainer_can_modify)
        if number is not None:
            pulumi.set(__self__, "number", number)
        if opened_at is not None:
            pulumi.set(__self__, "opened_at", opened_at)
        if opened_by is not None:
            pulumi.set(__self__, "opened_by", opened_by)
        if owner is not None:
            pulumi.set(__self__, "owner", owner)
        if state is not None:
            pulumi.set(__self__, "state", state)
        if title is not None:
            pulumi.set(__self__, "title", title)
        if updated_at is not None:
            pulumi.set(__self__, "updated_at", updated_at)

    @property
    @pulumi.getter(name="baseRef")
    def base_ref(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the branch serving as the base of the Pull Request.
        """
        return pulumi.get(self, "base_ref")

    @base_ref.setter
    def base_ref(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "base_ref", value)

    @property
    @pulumi.getter(name="baseRepository")
    def base_repository(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the base repository to retrieve the Pull Requests from.
        """
        return pulumi.get(self, "base_repository")

    @base_repository.setter
    def base_repository(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "base_repository", value)

    @property
    @pulumi.getter(name="baseSha")
    def base_sha(self) -> Optional[pulumi.Input[str]]:
        """
        Head commit SHA of the Pull Request base.
        """
        return pulumi.get(self, "base_sha")

    @base_sha.setter
    def base_sha(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "base_sha", value)

    @property
    @pulumi.getter
    def body(self) -> Optional[pulumi.Input[str]]:
        """
        Body of the Pull Request.
        """
        return pulumi.get(self, "body")

    @body.setter
    def body(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "body", value)

    @property
    @pulumi.getter
    def draft(self) -> Optional[pulumi.Input[bool]]:
        """
        Indicates Whether this Pull Request is a draft.
        """
        return pulumi.get(self, "draft")

    @draft.setter
    def draft(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "draft", value)

    @property
    @pulumi.getter(name="headRef")
    def head_ref(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the branch serving as the head of the Pull Request.
        """
        return pulumi.get(self, "head_ref")

    @head_ref.setter
    def head_ref(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "head_ref", value)

    @property
    @pulumi.getter(name="headSha")
    def head_sha(self) -> Optional[pulumi.Input[str]]:
        """
        Head commit SHA of the Pull Request head.
        """
        return pulumi.get(self, "head_sha")

    @head_sha.setter
    def head_sha(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "head_sha", value)

    @property
    @pulumi.getter
    def labels(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        List of label names set on the Pull Request.
        """
        return pulumi.get(self, "labels")

    @labels.setter
    def labels(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "labels", value)

    @property
    @pulumi.getter(name="maintainerCanModify")
    def maintainer_can_modify(self) -> Optional[pulumi.Input[bool]]:
        """
        Controls whether the base repository maintainers can modify the Pull Request. Default: false.
        """
        return pulumi.get(self, "maintainer_can_modify")

    @maintainer_can_modify.setter
    def maintainer_can_modify(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "maintainer_can_modify", value)

    @property
    @pulumi.getter
    def number(self) -> Optional[pulumi.Input[int]]:
        """
        The number of the Pull Request within the repository.
        """
        return pulumi.get(self, "number")

    @number.setter
    def number(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "number", value)

    @property
    @pulumi.getter(name="openedAt")
    def opened_at(self) -> Optional[pulumi.Input[int]]:
        """
        Unix timestamp indicating the Pull Request creation time.
        """
        return pulumi.get(self, "opened_at")

    @opened_at.setter
    def opened_at(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "opened_at", value)

    @property
    @pulumi.getter(name="openedBy")
    def opened_by(self) -> Optional[pulumi.Input[str]]:
        """
        GitHub login of the user who opened the Pull Request.
        """
        return pulumi.get(self, "opened_by")

    @opened_by.setter
    def opened_by(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "opened_by", value)

    @property
    @pulumi.getter
    def owner(self) -> Optional[pulumi.Input[str]]:
        """
        Owner of the repository. If not provided, the provider's default owner is used.
        """
        return pulumi.get(self, "owner")

    @owner.setter
    def owner(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "owner", value)

    @property
    @pulumi.getter
    def state(self) -> Optional[pulumi.Input[str]]:
        """
        the current Pull Request state - can be "open", "closed" or "merged".
        """
        return pulumi.get(self, "state")

    @state.setter
    def state(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "state", value)

    @property
    @pulumi.getter
    def title(self) -> Optional[pulumi.Input[str]]:
        """
        The title of the Pull Request.
        """
        return pulumi.get(self, "title")

    @title.setter
    def title(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "title", value)

    @property
    @pulumi.getter(name="updatedAt")
    def updated_at(self) -> Optional[pulumi.Input[int]]:
        """
        The timestamp of the last Pull Request update.
        """
        return pulumi.get(self, "updated_at")

    @updated_at.setter
    def updated_at(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "updated_at", value)
class RepositoryPullRequest(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
base_ref: Optional[pulumi.Input[str]] = None,
base_repository: Optional[pulumi.Input[str]] = None,
body: Optional[pulumi.Input[str]] = None,
head_ref: Optional[pulumi.Input[str]] = None,
maintainer_can_modify: Optional[pulumi.Input[bool]] = None,
owner: Optional[pulumi.Input[str]] = None,
title: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
This resource allows you to create and manage PullRequests for repositories within your GitHub organization or personal account.
## Example Usage
```python
import pulumi
import pulumi_github as github
example = github.RepositoryPullRequest("example",
base_ref="main",
base_repository="example-repository",
body="This will change everything",
head_ref="feature-branch",
title="My newest feature")
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] base_ref: Name of the branch serving as the base of the Pull Request.
:param pulumi.Input[str] base_repository: Name of the base repository to retrieve the Pull Requests from.
:param pulumi.Input[str] body: Body of the Pull Request.
:param pulumi.Input[str] head_ref: Name of the branch serving as the head of the Pull Request.
:param pulumi.Input[bool] maintainer_can_modify: Controls whether the base repository maintainers can modify the Pull Request. Default: false.
:param pulumi.Input[str] owner: Owner of the repository. If not provided, the provider's default owner is used.
:param pulumi.Input[str] title: The title of the Pull Request.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: RepositoryPullRequestArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
This resource allows you to create and manage PullRequests for repositories within your GitHub organization or personal account.
## Example Usage
```python
import pulumi
import pulumi_github as github
example = github.RepositoryPullRequest("example",
base_ref="main",
base_repository="example-repository",
body="This will change everything",
head_ref="feature-branch",
title="My newest feature")
```
:param str resource_name: The name of the resource.
:param RepositoryPullRequestArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(RepositoryPullRequestArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
base_ref: Optional[pulumi.Input[str]] = None,
base_repository: Optional[pulumi.Input[str]] = None,
body: Optional[pulumi.Input[str]] = None,
head_ref: Optional[pulumi.Input[str]] = None,
maintainer_can_modify: Optional[pulumi.Input[bool]] = None,
owner: Optional[pulumi.Input[str]] = None,
title: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = RepositoryPullRequestArgs.__new__(RepositoryPullRequestArgs)
if base_ref is None and not opts.urn:
raise TypeError("Missing required property 'base_ref'")
__props__.__dict__["base_ref"] = base_ref
if base_repository is None and not opts.urn:
raise TypeError("Missing required property 'base_repository'")
__props__.__dict__["base_repository"] = base_repository
__props__.__dict__["body"] = body
if head_ref is None and not opts.urn:
raise TypeError("Missing required property 'head_ref'")
__props__.__dict__["head_ref"] = head_ref
__props__.__dict__["maintainer_can_modify"] = maintainer_can_modify
__props__.__dict__["owner"] = owner
if title is None and not opts.urn:
raise TypeError("Missing required property 'title'")
__props__.__dict__["title"] = title
__props__.__dict__["base_sha"] = None
__props__.__dict__["draft"] = None
__props__.__dict__["head_sha"] = None
__props__.__dict__["labels"] = None
__props__.__dict__["number"] = None
__props__.__dict__["opened_at"] = None
__props__.__dict__["opened_by"] = None
__props__.__dict__["state"] = None
__props__.__dict__["updated_at"] = None
super(RepositoryPullRequest, __self__).__init__(
'github:index/repositoryPullRequest:RepositoryPullRequest',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
base_ref: Optional[pulumi.Input[str]] = None,
base_repository: Optional[pulumi.Input[str]] = None,
base_sha: Optional[pulumi.Input[str]] = None,
body: Optional[pulumi.Input[str]] = None,
draft: Optional[pulumi.Input[bool]] = None,
head_ref: Optional[pulumi.Input[str]] = None,
head_sha: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
maintainer_can_modify: Optional[pulumi.Input[bool]] = None,
number: Optional[pulumi.Input[int]] = None,
opened_at: Optional[pulumi.Input[int]] = None,
opened_by: Optional[pulumi.Input[str]] = None,
owner: Optional[pulumi.Input[str]] = None,
state: Optional[pulumi.Input[str]] = None,
title: Optional[pulumi.Input[str]] = None,
updated_at: Optional[pulumi.Input[int]] = None) -> 'RepositoryPullRequest':
"""
Get an existing RepositoryPullRequest resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] base_ref: Name of the branch serving as the base of the Pull Request.
:param pulumi.Input[str] base_repository: Name of the base repository to retrieve the Pull Requests from.
:param pulumi.Input[str] base_sha: Head commit SHA of the Pull Request base.
:param pulumi.Input[str] body: Body of the Pull Request.
:param pulumi.Input[bool] draft: Indicates Whether this Pull Request is a draft.
:param pulumi.Input[str] head_ref: Name of the branch serving as the head of the Pull Request.
:param pulumi.Input[str] head_sha: Head commit SHA of the Pull Request head.
:param pulumi.Input[Sequence[pulumi.Input[str]]] labels: List of label names set on the Pull Request.
:param pulumi.Input[bool] maintainer_can_modify: Controls whether the base repository maintainers can modify the Pull Request. Default: false.
:param pulumi.Input[int] number: The number of the Pull Request within the repository.
:param pulumi.Input[int] opened_at: Unix timestamp indicating the Pull Request creation time.
:param pulumi.Input[str] opened_by: GitHub login of the user who opened the Pull Request.
:param pulumi.Input[str] owner: Owner of the repository. If not provided, the provider's default owner is used.
:param pulumi.Input[str] state: the current Pull Request state - can be "open", "closed" or "merged".
:param pulumi.Input[str] title: The title of the Pull Request.
:param pulumi.Input[int] updated_at: The timestamp of the last Pull Request update.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _RepositoryPullRequestState.__new__(_RepositoryPullRequestState)
__props__.__dict__["base_ref"] = base_ref
__props__.__dict__["base_repository"] = base_repository
__props__.__dict__["base_sha"] = base_sha
__props__.__dict__["body"] = body
__props__.__dict__["draft"] = draft
__props__.__dict__["head_ref"] = head_ref
__props__.__dict__["head_sha"] = head_sha
__props__.__dict__["labels"] = labels
__props__.__dict__["maintainer_can_modify"] = maintainer_can_modify
__props__.__dict__["number"] = number
__props__.__dict__["opened_at"] = opened_at
__props__.__dict__["opened_by"] = opened_by
__props__.__dict__["owner"] = owner
__props__.__dict__["state"] = state
__props__.__dict__["title"] = title
__props__.__dict__["updated_at"] = updated_at
return RepositoryPullRequest(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="baseRef")
def base_ref(self) -> pulumi.Output[str]:
"""
Name of the branch serving as the base of the Pull Request.
"""
return pulumi.get(self, "base_ref")
@property
@pulumi.getter(name="baseRepository")
def base_repository(self) -> pulumi.Output[str]:
"""
Name of the base repository to retrieve the Pull Requests from.
"""
return pulumi.get(self, "base_repository")
@property
@pulumi.getter(name="baseSha")
def base_sha(self) -> pulumi.Output[str]:
"""
Head commit SHA of the Pull Request base.
"""
return pulumi.get(self, "base_sha")
@property
@pulumi.getter
def body(self) -> pulumi.Output[Optional[str]]:
"""
Body of the Pull Request.
"""
return pulumi.get(self, "body")
@property
@pulumi.getter
def draft(self) -> pulumi.Output[bool]:
"""
Indicates Whether this Pull Request is a draft.
"""
return pulumi.get(self, "draft")
@property
@pulumi.getter(name="headRef")
def head_ref(self) -> pulumi.Output[str]:
"""
Name of the branch serving as the head of the Pull Request.
"""
return pulumi.get(self, "head_ref")
@property
@pulumi.getter(name="headSha")
def head_sha(self) -> pulumi.Output[str]:
"""
Head commit SHA of the Pull Request head.
"""
return pulumi.get(self, "head_sha")
    @property
    @pulumi.getter
    def labels(self) -> pulumi.Output[Sequence[str]]:
        """
        List of label names set on the Pull Request.
        """
        return pulumi.get(self, "labels")
    @property
    @pulumi.getter(name="maintainerCanModify")
    def maintainer_can_modify(self) -> pulumi.Output[Optional[bool]]:
        """
        Controls whether the base repository maintainers can modify the Pull Request. Default: false.
        """
        return pulumi.get(self, "maintainer_can_modify")
    @property
    @pulumi.getter
    def number(self) -> pulumi.Output[int]:
        """
        The number of the Pull Request within the repository.
        """
        return pulumi.get(self, "number")
    @property
    @pulumi.getter(name="openedAt")
    def opened_at(self) -> pulumi.Output[int]:
        """
        Unix timestamp indicating the Pull Request creation time.
        """
        return pulumi.get(self, "opened_at")
    @property
    @pulumi.getter(name="openedBy")
    def opened_by(self) -> pulumi.Output[str]:
        """
        GitHub login of the user who opened the Pull Request.
        """
        return pulumi.get(self, "opened_by")
    @property
    @pulumi.getter
    def owner(self) -> pulumi.Output[Optional[str]]:
        """
        Owner of the repository. If not provided, the provider's default owner is used.
        """
        return pulumi.get(self, "owner")
    @property
    @pulumi.getter
    def state(self) -> pulumi.Output[str]:
        """
        The current Pull Request state - can be "open", "closed" or "merged".
        """
        return pulumi.get(self, "state")
    @property
    @pulumi.getter
    def title(self) -> pulumi.Output[str]:
        """
        The title of the Pull Request.
        """
        return pulumi.get(self, "title")
    @property
    @pulumi.getter(name="updatedAt")
    def updated_at(self) -> pulumi.Output[int]:
        """
        The timestamp of the last Pull Request update.
        """
        return pulumi.get(self, "updated_at")
| 40.242297
| 150
| 0.628789
| 3,474
| 28,733
| 4.986471
| 0.059298
| 0.098424
| 0.088899
| 0.073659
| 0.852855
| 0.822952
| 0.786873
| 0.752352
| 0.736882
| 0.702419
| 0
| 0.000047
| 0.263913
| 28,733
| 713
| 151
| 40.298738
| 0.819007
| 0.288727
| 0
| 0.611511
| 1
| 0
| 0.087051
| 0.016591
| 0
| 0
| 0
| 0
| 0
| 1
| 0.165468
| false
| 0.002398
| 0.01199
| 0
| 0.280576
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0f671ec5ae5d7110499cfacf5a341c8c1013e3ad
| 26,956
|
py
|
Python
|
ats/tests/tests_views.py
|
dictoss/active-task-summary
|
1febb3b9e9e2a4e83a555cecfab374eb5eeaa816
|
[
"BSD-2-Clause"
] | 2
|
2016-11-11T00:07:42.000Z
|
2017-04-23T10:58:43.000Z
|
ats/tests/tests_views.py
|
dictoss/active-task-summary
|
1febb3b9e9e2a4e83a555cecfab374eb5eeaa816
|
[
"BSD-2-Clause"
] | 43
|
2015-04-02T13:06:37.000Z
|
2022-02-19T14:30:57.000Z
|
ats/tests/tests_views.py
|
dictoss/active-task-summary
|
1febb3b9e9e2a4e83a555cecfab374eb5eeaa816
|
[
"BSD-2-Clause"
] | null | null | null |
from django.test import TestCase, Client, RequestFactory
from django.urls import reverse
from django.contrib.auth.models import User
from django.db import IntegrityError
import datetime
from datetime import timedelta
import pytz
from ..views import (
format_totaltime,
format_hours_float,
get_projects_in_date,
error404,
error500,
index,
login_view,
logout_view)
from ..models import (
Job,
Task,
Project,
ProjectWorker,
UsedTaskTime)
class AtsTestClient(Client):
    """Test HTTP client used by all view test cases (no overrides yet)."""
    pass
class AtsViewTestCase(TestCase):
    """Common base class for the view test cases below (no shared logic yet)."""
    pass
class AtsViewFuncTestClient(TestCase):
    """Exercises bare view helper functions directly against fixture data."""

    fixtures = ['test_views.json']

    def test_get_projects_in_date(self):
        # get_projects_in_date must return something (possibly empty),
        # never None, for a fixture user.  The original dead
        # `_ret = None` pre-assignment was removed.
        _user = User.objects.get(username='testuser100')
        _ret = get_projects_in_date(_user, '2014-01-30')
        self.assertIsNotNone(_ret)
class Ats404ViewTestCase(AtsViewTestCase):
    """Tests the custom 404 handler, via the client and by direct call."""

    fixtures = ['test_views.json']
    client_class = AtsTestClient
    view_name = ''
    _password = 'passpass'

    def setUp(self):
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            'testuser1',
            'testuser1@example.com',
            self._password)

    def tearDown(self):
        pass

    def test_404(self):
        # An unknown URL through the client returns 404.
        _response = self.client.get('/ats/zzz/')
        self.assertEqual(_response.status_code, 404)
        # Calling the handler directly also yields a 404 page.
        _request = self.factory.get('/ats/zzz/')
        _responsev = error404(_request)
        self.assertEqual(_responsev.status_code, 404)
        # BUG FIX: the original used assertTrue(content.find(b'...')),
        # but bytes.find() returns -1 (truthy!) when the marker is absent
        # and 0 (falsy) when it is at position 0, so the assertion never
        # tested containment.  assertIn checks it correctly.
        self.assertIn(b'404 NOT FOUND', _responsev.content)
class Ats500ViewTestCase(AtsViewTestCase):
    """Tests that the internal-error view raises when requested."""

    fixtures = ['test_views.json']
    client_class = AtsTestClient
    view_name = 'ats:error_internal'
    _password = 'passpass'

    def setUp(self):
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            'testuser1',
            'testuser1@example.com',
            self._password)

    def tearDown(self):
        pass

    def test_500(self):
        # The view is expected to raise; assertRaises replaces the
        # original try/except-pass/else-fail construct and fails the
        # test automatically if no exception occurs.
        with self.assertRaises(Exception):
            self.client.get(reverse(self.view_name))
class IndexViewTestCase(AtsViewTestCase):
    """Checks that the index/top view redirects anonymous visitors."""

    fixtures = ['test_views.json']
    client_class = AtsTestClient
    view_name = 'ats:index'
    _password = 'passpass'

    def setUp(self):
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            'testuser1',
            'testuser1@example.com',
            self._password)

    def tearDown(self):
        pass

    def test_index(self):
        # Through the test client: unauthenticated access is redirected.
        resp = self.client.get(reverse('ats:top'))
        self.assertEqual(resp.status_code, 302)
        # Calling the view function directly behaves the same way.
        req = self.factory.get(reverse('ats:top'))
        direct = index(req)
        self.assertEqual(direct.status_code, 302)
class LoginViewTestCase(AtsViewTestCase):
    """Covers the login/logout views for successful and failed attempts."""

    fixtures = ['test_views.json']
    client_class = AtsTestClient
    view_name = ''
    _password = 'passpass'

    def setUp(self):
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            'testuser1',
            'testuser1@example.com',
            self._password)

    def tearDown(self):
        pass

    def test_login_success(self):
        self.client.logout()
        # Anonymous users see the login form.
        resp = self.client.get(reverse('ats:login_view'))
        self.assertEqual(resp.status_code, 200)
        # Authenticated users are bounced from the login page to the top.
        ok = self.client.login(username=self.user.username,
                               password=self._password)
        self.assertTrue(ok)
        resp = self.client.get(reverse('ats:login_view'))
        self.assertRedirects(resp,
                             expected_url=reverse('ats:top'),
                             status_code=302,
                             target_status_code=200)
        # Logging out redirects back to the login page.
        resp = self.client.get(reverse('ats:logout_view'))
        self.assertRedirects(resp,
                             expected_url=reverse('ats:login_view'),
                             status_code=302,
                             target_status_code=200)

    def test_login_success_has_next(self):
        self.client.logout()
        # A POST login carrying ?next= lands on the requested page.
        login_url = reverse('ats:login_view')
        resp = self.client.post(
            '%s?next=%s' % (login_url, reverse('ats:regist')),
            {'username': self.user.username,
             'password': self._password})
        self.assertRedirects(resp,
                             expected_url=reverse('ats:regist'),
                             status_code=302,
                             target_status_code=200)
        # Logout still redirects to the login page.
        resp = self.client.get(reverse('ats:logout_view'))
        self.assertRedirects(resp,
                             expected_url=reverse('ats:login_view'),
                             status_code=302,
                             target_status_code=200)

    def test_loginform(self):
        # Valid credentials: redirected to the top page.
        resp = self.client.post(
            reverse('ats:login_view'),
            {'username': self.user.username,
             'password': self._password})
        self.assertRedirects(resp,
                             expected_url=reverse('ats:top'),
                             status_code=302,
                             target_status_code=200)
        # Wrong password, then wrong user+password: form re-rendered (200).
        for creds in ({'username': self.user.username,
                       'password': 'dummypass'},
                      {'username': 'dummyuser',
                       'password': 'dummypass'}):
            resp = self.client.post(reverse('ats:login_view'), creds)
            self.assertEqual(resp.status_code, 200)

    def test_login_fail_password_miss(self):
        self.client.logout()
        resp = self.client.get(reverse('ats:login_view'))
        self.assertEqual(resp.status_code, 200)
        # Wrong password: login() reports failure and the form stays up.
        ok = self.client.login(username=self.user.username,
                               password="dummypass")
        self.assertFalse(ok)
        resp = self.client.get(reverse('ats:login_view'))
        self.assertEqual(resp.status_code, 200)

    def test_login_fail(self):
        self.client.logout()
        resp = self.client.get(reverse('ats:login_view'))
        self.assertEqual(resp.status_code, 200)
        # Unknown account: login() reports failure and the form stays up.
        ok = self.client.login(username="dummyuser",
                               password="12345678")
        self.assertFalse(ok)
        resp = self.client.get(reverse('ats:login_view'))
        self.assertEqual(resp.status_code, 200)
class TopViewTestCase(AtsViewTestCase):
    """A logged-in user can load the top page."""

    fixtures = ['test_views.json']
    client_class = AtsTestClient
    view_name = 'ats:top'
    _password = 'passpass'

    def setUp(self):
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            'testuser1',
            'testuser1@example.com',
            self._password)

    def tearDown(self):
        pass

    def test_top(self):
        ok = self.client.login(username=self.user.username,
                               password=self._password)
        self.assertTrue(ok)
        resp = self.client.get(reverse(self.view_name))
        self.assertEqual(resp.status_code, 200)
class QueryViewTestCase(AtsViewTestCase):
    """A logged-in user can load the query page."""

    fixtures = ['test_views.json']
    client_class = AtsTestClient
    view_name = 'ats:query'
    _password = 'passpass'

    def setUp(self):
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            'testuser1',
            'testuser1@example.com',
            self._password)

    def tearDown(self):
        pass

    def test_query(self):
        ok = self.client.login(username=self.user.username,
                               password=self._password)
        self.assertTrue(ok)
        resp = self.client.get(reverse(self.view_name))
        self.assertEqual(resp.status_code, 200)
class ManageViewTestCase(AtsViewTestCase):
    """A logged-in user can load the manage page."""

    fixtures = ['test_views.json']
    client_class = AtsTestClient
    view_name = 'ats:manage'
    _password = 'passpass'

    def setUp(self):
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            'testuser1',
            'testuser1@example.com',
            self._password)

    def tearDown(self):
        pass

    def test_manage(self):
        ok = self.client.login(username=self.user.username,
                               password=self._password)
        self.assertTrue(ok)
        resp = self.client.get(reverse(self.view_name))
        self.assertEqual(resp.status_code, 200)
class ManageChpasswdViewTestCase(AtsViewTestCase):
    """Tests for the password-change page under /manage."""

    fixtures = ['test_views.json']
    client_class = AtsTestClient
    view_name = 'ats:manage_chpasswd'
    _password = 'passpass'

    def setUp(self):
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            'testuser1',
            'testuser1@example.com',
            self._password)

    def tearDown(self):
        pass

    def _login(self):
        # Shared login step for every test below.
        ok = self.client.login(username=self.user.username,
                               password=self._password)
        self.assertTrue(ok)

    def test_get(self):
        self._login()
        resp = self.client.get(reverse(self.view_name))
        self.assertEqual(resp.status_code, 200)

    def test_post_success(self):
        self._login()
        resp = self.client.post(
            reverse(self.view_name), {
                'old_password': self._password,
                'new_password1': 'qwertyuiop',
                'new_password2': 'qwertyuiop',
            })
        self.assertEqual(resp.status_code, 200)

    def test_post_error(self):
        self._login()
        # wrong current password
        resp = self.client.post(
            reverse(self.view_name), {
                'old_password': '12345678',
                'new_password1': 'qwertyuiop',
                'new_password2': 'qwertyuiop',
            })
        self.assertEqual(resp.status_code, 200)
        # the two new passwords do not match
        resp = self.client.post(
            reverse(self.view_name), {
                'old_password': self._password,
                'new_password1': 'qwertyuiop',
                'new_password2': 'qwertyuiop@',
            })
        self.assertEqual(resp.status_code, 200)
class SummaryProjectViewTestCase(AtsViewTestCase):
    """Tests for the per-project summary view (GET form and POST queries)."""

    fixtures = ['test_views.json']
    client_class = AtsTestClient
    view_name = 'ats:summary_p'
    _password = 'passpass'

    def setUp(self):
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            'testuser1',
            'testuser1@example.com',
            self._password)

    def tearDown(self):
        pass

    def _add_task_time(self, project, task, taskdate, tasktime):
        # Factory for UsedTaskTime rows; removes the create() boilerplate
        # that was repeated verbatim in test_post_with_data.
        return UsedTaskTime.objects.create(
            user=self.user,
            project=project,
            task=task,
            taskdate=taskdate,
            tasktime=tasktime)

    def test_get(self):
        _result = self.client.login(username=self.user.username,
                                    password=self._password)
        self.assertTrue(_result)
        _response = self.client.get(reverse(self.view_name))
        self.assertEqual(_response.status_code, 200)

    def test_post(self):
        _result = self.client.login(username=self.user.username,
                                    password=self._password)
        self.assertTrue(_result)
        # Query with and without the per-task detail breakdown.
        for detail in ('0', '1'):
            _response = self.client.post(
                reverse(self.view_name), {
                    'from_date': '2014-01-01',
                    'to_date': '2014-03-31',
                    'projectlist': 1,
                    'is_show_taskdetail': detail,
                })
            self.assertEqual(_response.status_code, 200)

    def test_post_with_data(self):
        _result = self.client.login(username='testuser100',
                                    password='password')
        self.assertTrue(_result)
        # insert data
        # NOTE(review): rows are created for self.user (testuser1) while
        # the client is logged in as testuser100 -- confirm this is intended.
        _project = Project.objects.get(pk=1)
        _job = Job.objects.get(pk=1)
        _task = Task.objects.filter(job=_job).order_by('id')[0]
        self._add_task_time(_project, _task, '2014-01-25', '02:15:00')
        self._add_task_time(_project, _task, '2014-02-25', '04:30:00')
        self._add_task_time(_project, _task, '2014-03-25', '08:30:00')
        # Query with and without the per-task detail breakdown.
        for detail in ('0', '1'):
            _response = self.client.post(
                reverse(self.view_name), {
                    'from_date': '2014-01-01',
                    'to_date': '2014-03-31',
                    'projectlist': _project.id,
                    'is_show_taskdetail': detail,
                })
            self.assertEqual(_response.status_code, 200)
class SummaryJobViewTestCase(AtsViewTestCase):
    """Tests for the per-job summary view (GET form and POST queries)."""

    fixtures = ['test_views.json']
    client_class = AtsTestClient
    view_name = 'ats:summary_j'
    _password = 'passpass'

    def setUp(self):
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            'testuser1',
            'testuser1@example.com',
            self._password)

    def tearDown(self):
        pass

    def _add_task_time(self, project, task, taskdate, tasktime):
        # Factory for UsedTaskTime rows; removes the create() boilerplate
        # that was repeated verbatim in test_post_with_data.
        return UsedTaskTime.objects.create(
            user=self.user,
            project=project,
            task=task,
            taskdate=taskdate,
            tasktime=tasktime)

    def test_get(self):
        _result = self.client.login(username=self.user.username,
                                    password=self._password)
        self.assertTrue(_result)
        _response = self.client.get(reverse(self.view_name))
        self.assertEqual(_response.status_code, 200)

    def test_post(self):
        _result = self.client.login(username=self.user.username,
                                    password=self._password)
        self.assertTrue(_result)
        _response = self.client.post(
            reverse(self.view_name), {
                'from_date': '2014-01-01',
                'to_date': '2014-03-31',
                'joblist': 1,
            })
        self.assertEqual(_response.status_code, 200)

    def test_post_with_data(self):
        _result = self.client.login(username='testuser100',
                                    password='password')
        self.assertTrue(_result)
        # insert data
        # NOTE(review): rows are created for self.user (testuser1) while
        # the client is logged in as testuser100 -- confirm this is intended.
        _project = Project.objects.get(pk=1)
        _job1 = Job.objects.get(pk=1)
        _job2 = Job.objects.get(pk=2)
        _task = Task.objects.filter(job=_job1).order_by('id')[0]
        self._add_task_time(_project, _task, '2014-01-25', '02:15:00')
        self._add_task_time(_project, _task, '2014-02-25', '04:30:00')
        self._add_task_time(_project, _task, '2014-03-25', '08:30:00')
        # The original posted the identical query twice; keep that
        # repetition as a basic idempotency smoke check.
        for _ in range(2):
            _response = self.client.post(
                reverse(self.view_name), {
                    'from_date': '2014-01-01',
                    'to_date': '2014-03-31',
                    'joblist': [_job1.id, _job2.id],
                })
            self.assertEqual(_response.status_code, 200)
class SummaryUserViewTestCase(AtsViewTestCase):
    """Tests for the per-user summary view (GET form and POST queries)."""

    fixtures = ['test_views.json']
    client_class = AtsTestClient
    view_name = 'ats:summary_u'
    _password = 'passpass'

    def setUp(self):
        self.factory = RequestFactory()
        self.user = User.objects.create_user(
            'testuser1',
            'testuser1@example.com',
            self._password)

    def tearDown(self):
        pass

    def _add_task_time(self, project, task, taskdate, tasktime):
        # Factory for UsedTaskTime rows; replaces the twelve verbatim
        # create() blocks of the original test_post_with_data.
        return UsedTaskTime.objects.create(
            user=self.user,
            project=project,
            task=task,
            taskdate=taskdate,
            tasktime=tasktime)

    def test_get(self):
        _result = self.client.login(username=self.user.username,
                                    password=self._password)
        self.assertTrue(_result)
        _response = self.client.get(reverse(self.view_name))
        self.assertEqual(_response.status_code, 200)

    def test_post(self):
        _result = self.client.login(username=self.user.username,
                                    password=self._password)
        self.assertTrue(_result)
        _response = self.client.post(
            reverse(self.view_name), {
                'from_date': '2014-01-01',
                'to_date': '2014-03-31',
                'userlist': 2,
            })
        self.assertEqual(_response.status_code, 200)

    def test_post_with_data(self):
        _user_id_list = []
        # insert data for the 1st user.
        # NOTE(review): every row is created for self.user (the setUp
        # user), not for the account the client logs in as, and
        # _user_id_list therefore receives the same id twice -- confirm
        # this matches the original intent.
        _result = self.client.login(username='testuser100',
                                    password='password')
        self.assertTrue(_result)
        _user_id_list.append(self.user.id)
        _project1 = Project.objects.get(pk=1)
        _project2 = Project.objects.get(pk=2)
        _job = Job.objects.get(pk=1)
        _task = Task.objects.filter(job=_job).order_by('id')[0]
        for _project in (_project1, _project2):
            self._add_task_time(_project, _task, '2014-01-25', '02:15:00')
            self._add_task_time(_project, _task, '2014-02-25', '04:30:00')
            self._add_task_time(_project, _task, '2014-03-25', '08:30:00')
        self.client.logout()
        # insert data for the 2nd user.
        _result = self.client.login(username='testuser200',
                                    password='password')
        self.assertTrue(_result)
        _user_id_list.append(self.user.id)
        for _project in (_project1, _project2):
            self._add_task_time(_project, _task, '2014-01-25', '02:15:00')
            self._add_task_time(_project, _task, '2014-02-25', '01:00:00')
            self._add_task_time(_project, _task, '2014-03-25', '08:30:00')
        # The original posted the identical query twice; keep that
        # repetition as a basic idempotency smoke check.
        for _ in range(2):
            _response = self.client.post(
                reverse(self.view_name), {
                    'from_date': '2014-01-01',
                    'to_date': '2014-03-31',
                    'userlist': _user_id_list,
                })
            self.assertEqual(_response.status_code, 200)
class RegistViewTestCase(AtsViewTestCase):
    """Tests for the task-time registration view."""

    fixtures = ['test_views.json']
    client_class = AtsTestClient
    view_name = 'ats:regist'
    username = 'testuser100'
    password = 'password'

    def setUp(self):
        self.factory = RequestFactory()

    def tearDown(self):
        pass

    def _post_regist(self, project_id, datalist, regist_date='2014-01-30'):
        """POST a 'regist' submission built from datalist rows.

        Each row is a dict with 'registcheck', 'uttid', 'tasktime_hour'
        and 'tasktime_min' keys, mirroring the form's parallel field
        lists.  Replaces the payload construction that was repeated
        three times in test_post_regist.
        """
        return self.client.post(
            reverse(self.view_name), {
                'submit_type': 'regist',
                'regist_date': regist_date,
                'project_id': project_id,
                'registcheck': [o['registcheck'] for o in datalist],
                'uttid': [o['uttid'] for o in datalist],
                'tasktime_hour': [o['tasktime_hour'] for o in datalist],
                'tasktime_min': [o['tasktime_min'] for o in datalist],
            })

    def test_get_unassign_user(self):
        # A user with no project assignment can still open the page.
        _user = User.objects.create_user(
            'testuser1',
            'testuser1@example.com',
            'passpass')
        _result = self.client.login(username=_user.username,
                                    password='passpass')
        self.assertTrue(_result)
        _response = self.client.get(reverse(self.view_name))
        self.assertEqual(_response.status_code, 200)

    def test_get(self):
        _result = self.client.login(username=self.username,
                                    password=self.password)
        self.assertTrue(_result)
        _response = self.client.get(reverse(self.view_name))
        self.assertEqual(_response.status_code, 200)

    def test_get_dateselect(self):
        _result = self.client.login(username=self.username,
                                    password=self.password)
        self.assertTrue(_result)
        _response = self.client.get(
            reverse(self.view_name), {
                'submit_type': 'dateselect',
                'regist_date': '2014-01-30',
                'projectlist': 1,
            })
        self.assertEqual(_response.status_code, 200)

    def test_unsupported_method(self):
        # The view answers 200 even for verbs it does not handle.
        _result = self.client.login(username=self.username,
                                    password=self.password)
        self.assertTrue(_result)
        _response = self.client.put(
            reverse(self.view_name), {})
        self.assertEqual(_response.status_code, 200)

    def test_post_regist_nocheck(self):
        # Posting with empty field lists must not fail.
        _result = self.client.login(username=self.username,
                                    password=self.password)
        self.assertTrue(_result)
        _response = self._post_regist(1, [])
        self.assertEqual(_response.status_code, 200)

    def test_post_regist(self):
        _user = User.objects.get(username=self.username)
        _result = self.client.login(username=self.username,
                                    password=self.password)
        self.assertTrue(_result)
        _project_id = 1
        _pjw_qs = ProjectWorker.objects.filter(
            user=_user, project__pk=_project_id).order_by('id')
        # Build one form row per task of every job assigned to the user.
        _datalist = []
        for pjw in _pjw_qs:
            _job = Job.objects.get(pk=pjw.job.id)
            _task_qs = Task.objects.filter(
                job=_job).order_by('id')
            for t in _task_qs:
                _datalist.append({
                    'registcheck': 'p%s_t%s' % (pjw.project.id, t.id),
                    'uttid': 'p%s_t%s' % (pjw.project.id, t.id),
                    'tasktime_hour': 2,
                    'tasktime_min': 15,
                })
        # regist (add)
        _response = self._post_regist(_project_id, _datalist)
        # regist (update)
        for d in _datalist:
            d['tasktime_min'] = 30
        _response = self._post_regist(_project_id, _datalist)
        # regist (delete): zero hours and minutes removes the record
        for d in _datalist:
            d['tasktime_hour'] = 0
            d['tasktime_min'] = 0
        _response = self._post_regist(_project_id, _datalist)
        self.assertEqual(_response.status_code, 200)
| 30.458757
| 73
| 0.549414
| 2,681
| 26,956
| 5.293174
| 0.075345
| 0.052146
| 0.054542
| 0.071524
| 0.866394
| 0.847368
| 0.825382
| 0.820943
| 0.8115
| 0.803256
| 0
| 0.041585
| 0.33896
| 26,956
| 884
| 74
| 30.493213
| 0.754812
| 0.012242
| 0
| 0.77554
| 0
| 0
| 0.111491
| 0.009473
| 0
| 0
| 0
| 0
| 0.099281
| 1
| 0.077698
| false
| 0.115108
| 0.01295
| 0
| 0.184173
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
0f6b71be95be7ce75420c4b6a729f4804369a4e7
| 949
|
py
|
Python
|
UrsinaShaderBuilder/ExtraData/extra_models.py
|
Werxzy/UrsinaShaderBuilder
|
1ee13e36d3787a73e84f6b4bf9b51c51ba75c79b
|
[
"MIT"
] | 3
|
2022-03-01T00:01:30.000Z
|
2022-03-13T16:05:06.000Z
|
UrsinaShaderBuilder/ExtraData/extra_models.py
|
Werxzy/UrsinaShaderBuilder
|
1ee13e36d3787a73e84f6b4bf9b51c51ba75c79b
|
[
"MIT"
] | null | null | null |
UrsinaShaderBuilder/ExtraData/extra_models.py
|
Werxzy/UrsinaShaderBuilder
|
1ee13e36d3787a73e84f6b4bf9b51c51ba75c79b
|
[
"MIT"
] | null | null | null |
from ursina import Vec3
# Icon vertex lists for ursina meshes.  All points lie in the z = 0 plane.
# The lists below are intended for Mesh(mode='ngon') -- one closed polygon
# outline per list.
x_vert = [Vec3(0,1,0), Vec3(-1,2,0), Vec3(-2,1,0), Vec3(-1,0,0), Vec3(-2,-1,0), Vec3(-1,-2,0), Vec3(0,-1,0), Vec3(1,-2,0), Vec3(2,-1,0), Vec3(1,0,0), Vec3(2,1,0), Vec3(1,2,0)]  # 12-point "X" (close) icon
check_vert = [Vec3(-0.5,0,0), Vec3(-1.5,1,0), Vec3(-2.5,0,0), Vec3(-0.5,-2,0), Vec3(2.5,1,0), Vec3(1.5,2,0)]  # check mark
down_arrow_vert = [Vec3(0,0,0), Vec3(-1,1,0), Vec3(-2,0,0), Vec3(0,-2,0), Vec3(2,0,0), Vec3(1,1,0)]
right_arrow_vert = [Vec3(0,0,0), Vec3(-1,-1,0), Vec3(0,-2,0), Vec3(2,0,0), Vec3(0,2,0), Vec3(-1,1,0)]
up_arrow_vert = [Vec3(0,0,0), Vec3(1,-1,0), Vec3(2,0,0), Vec3(0,2,0), Vec3(-2,0,0), Vec3(-1,-1,0)]
left_arrow_vert = [Vec3(0,0,0), Vec3(1,1,0), Vec3(0,2,0), Vec3(-2,0,0), Vec3(0,-2,0), Vec3(1,-1,0)]
# Intended for Mesh(mode='triangle'): 12 vertices = 4 independent triangles
# (a diagonal double-headed scale arrow).
scale_arrow_vert = [Vec3(-2,2,0), Vec3(-2,-1,0), Vec3(1,2,0),
                    Vec3(1,0,0), Vec3(0,1,0), Vec3(-1,0,0),
                    Vec3(1,0,0), Vec3(-1,0,0), Vec3(0,-1,0),
                    Vec3(2,-2,0), Vec3(2,1,0), Vec3(-1,-2,0)]
| 67.785714
| 175
| 0.542677
| 249
| 949
| 2.02008
| 0.080321
| 0.467197
| 0.262425
| 0.139165
| 0.739563
| 0.739563
| 0.739563
| 0.739563
| 0.739563
| 0.739563
| 0
| 0.262045
| 0.103267
| 949
| 14
| 176
| 67.785714
| 0.329025
| 0.032666
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.090909
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
0f6e4a410cf57c465a5c2b2cb63e53b5f1a4f640
| 31,494
|
py
|
Python
|
read_xml_all/calcul_matrix_je_le_qui_dans_de_192_matrix_good_compare_1.py
|
daniel20162016/my-first
|
f9554dd476302b26e8a296393025f150922f349c
|
[
"MIT"
] | null | null | null |
read_xml_all/calcul_matrix_je_le_qui_dans_de_192_matrix_good_compare_1.py
|
daniel20162016/my-first
|
f9554dd476302b26e8a296393025f150922f349c
|
[
"MIT"
] | null | null | null |
read_xml_all/calcul_matrix_je_le_qui_dans_de_192_matrix_good_compare_1.py
|
daniel20162016/my-first
|
f9554dd476302b26e8a296393025f150922f349c
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 31 15:45:22 2016
@author: wang
"""
#from matplotlib import pylab as plt
#from numpy import fft, fromstring, int16, linspace
#import wave
from read_wav_xml_good_1 import*
from matrix_24_2 import*
from max_matrix_norm import*
import numpy as np
# open a wave file
# Input recording plus its word-timing annotation (xml), and the five
# words whose 192-element feature matrices are computed below.
filename = 'francois_filon_pure_3.wav'
filename_1 ='francois_filon_pure_3.xml'
word ='je'
word_2='le'
word_3='qui'
word_4='dans'
word_5='de'
#==============================================================================
# this is the parti for the 'je' start
#==============================================================================
# Locate the occurrences of the word in the recording.  The helper returns
# the float samples plus per-occurrence start/length/end sample indices.
wave_signal_float,framerate, word_start_point, word_length_point, word_end_point= read_wav_xml_good_1(filename,filename_1,word)
XJ_1 =wave_signal_float
# Analysis window of 1920 samples, advanced by 1440 samples per step.
t_step=1920;
t_entre_step=1440;
# Start/end sample indices of the first five occurrences.
# NOTE(review): assumes the xml lists at least five occurrences of the word.
t_du_1_1 = int(word_start_point[0]);
t_du_1_2 = int(word_end_point[0]);
t_du_2_1 = int(word_start_point[1]);
t_du_2_2 = int(word_end_point[1]);
t_du_3_1 = int(word_start_point[2]);
t_du_3_2 = int(word_end_point[2]);
t_du_4_1 = int(word_start_point[3]);
t_du_4_2 = int(word_end_point[3]);
t_du_5_1 = int(word_start_point[4]);
t_du_5_2 = int(word_end_point[4]);
fs=framerate
#XJ_du_1 = wave_signal_float[(t_du_1_1-1):t_du_1_2];
#length_XJ_du_1 = int(word_length_point[0]+1);
#x1,y1,z1=matrix_24_2(XJ_du_1,fs)
#x1=max_matrix_norm(x1)
#==============================================================================
# this part is to calcul the first matrix
#==============================================================================
# First window of occurrence 1: 24 normalized spectral values fill the
# first block of the 192-element (8 windows x 24 values) feature vector.
XJ_du_1_2 = XJ_1[(t_du_1_1-1):(t_du_1_1+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_1 = np.zeros([192])
for i in range(0,24):
    matrix_all_step_new_1[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
# Windows 2..8 (each shifted by t_entre_step samples) fill the remaining
# seven 24-value blocks.
for i in range(1,8):
    XJ_du_1_total = XJ_1[(t_du_1_1+t_entre_step*(i)-1):(t_du_1_1+t_step+t_entre_step*(i) )];
    x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
    x1_all=max_matrix_norm(x1_all)
    for j in range(0,24):
        matrix_all_step_new_1[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the second matrix
#==============================================================================
# Same computation for occurrence 2.  NOTE: range(1,2) iterates exactly
# once; the loop only serves to group the statements.
for k in range (1,2):
    t_start=t_du_2_1
    XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
    x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
    x1_1=max_matrix_norm(x1_1)
    matrix_all_step_new_2 = np.zeros([192])
    for i in range(0,24):
        matrix_all_step_new_2[i]=x1_1[i]
    #==========================================================================
    # the other colonne is the all fft
    #==========================================================================
    for i in range(1,8):
        XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
        x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
        x1_all=max_matrix_norm(x1_all)
        for j in range(0,24):
            matrix_all_step_new_2[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the 3 matrix
#==============================================================================
# Occurrence 3 (same single-pass range(1,2) construct).
for k in range (1,2):
    t_start=t_du_3_1
    XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
    x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
    x1_1=max_matrix_norm(x1_1)
    matrix_all_step_new_3 = np.zeros([192])
    for i in range(0,24):
        matrix_all_step_new_3[i]=x1_1[i]
    #==========================================================================
    # the other colonne is the all fft
    #==========================================================================
    for i in range(1,8):
        XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
        x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
        x1_all=max_matrix_norm(x1_all)
        for j in range(0,24):
            matrix_all_step_new_3[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the 4 matrix
#==============================================================================
# Occurrence 4.
for k in range (1,2):
    t_start=t_du_4_1
    XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
    x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
    x1_1=max_matrix_norm(x1_1)
    matrix_all_step_new_4 = np.zeros([192])
    for i in range(0,24):
        matrix_all_step_new_4[i]=x1_1[i]
    #==========================================================================
    # the other colonne is the all fft
    #==========================================================================
    for i in range(1,8):
        # print i
        XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
        x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
        x1_all=max_matrix_norm(x1_all)
        for j in range(0,24):
            matrix_all_step_new_4[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the 5 matrix
#==============================================================================
# Occurrence 5.
for k in range (1,2):
    t_start=t_du_5_1
    XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
    x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
    x1_1=max_matrix_norm(x1_1)
    matrix_all_step_new_5 = np.zeros([192])
    for i in range(0,24):
        matrix_all_step_new_5[i]=x1_1[i]
    #==========================================================================
    # the other colonne is the all fft
    #==========================================================================
    for i in range(1,8):
        # print i
        XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
        x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
        x1_all=max_matrix_norm(x1_all)
        for j in range(0,24):
            matrix_all_step_new_5[24*i+j]=x1_all[j]
# Keep the five per-occurrence feature vectors for the 'je' comparisons;
# the matrix_all_step_new_* names are reused by the next word's section.
je_compare_1 = matrix_all_step_new_1
je_compare_2 = matrix_all_step_new_2
je_compare_3 = matrix_all_step_new_3
je_compare_4 = matrix_all_step_new_4
je_compare_5 = matrix_all_step_new_5
#==============================================================================
# # this is the parti for the 'je' end
#==============================================================================
#np.savez('je_le_qui_dans_de_192_matrix.npz',matrix_all_step_new_1,matrix_all_step_new_2,matrix_all_step_new_3,matrix_all_step_new_4,matrix_all_step_new_5)
#==============================================================================
# 'le': build the five 192-element comparison vectors for word_2
#==============================================================================
wave_signal_float, framerate, word_start_point, word_length_point, word_end_point = \
    read_wav_xml_good_1(filename, filename_1, word_2)
XJ_1 = wave_signal_float
fs = framerate
t_step = 1920        # analysis window length (samples); slices are t_step+1 long
t_entre_step = 1440  # hop between successive windows (samples)
# 1-based start/end sample of each of the five occurrences of the word
t_du_1_1 = int(word_start_point[0]); t_du_1_2 = int(word_end_point[0])
t_du_2_1 = int(word_start_point[1]); t_du_2_2 = int(word_end_point[1])
t_du_3_1 = int(word_start_point[2]); t_du_3_2 = int(word_end_point[2])
t_du_4_1 = int(word_start_point[3]); t_du_4_2 = int(word_end_point[3])
t_du_5_1 = int(word_start_point[4]); t_du_5_2 = int(word_end_point[4])

def _word_matrix_192(signal, t_start, t_step, t_entre_step, fs):
    """Return the 192-element feature vector for one word occurrence.

    Window w (w = 0..7) covers
    signal[t_start + t_entre_step*w - 1 : t_start + t_entre_step*w + t_step],
    i.e. exactly the slices the original unrolled code used (start points are
    1-based, hence the -1; each slice is t_step + 1 samples long).  Each
    window contributes 24 max-normalized bands from matrix_24_2.
    """
    feats = np.zeros([192])
    for w in range(0, 8):
        lo = t_start + t_entre_step * w - 1  # -1: word start points are 1-based
        band, _, _ = matrix_24_2(signal[lo:lo + t_step + 1], fs)
        band = max_matrix_norm(band)
        for j in range(0, 24):
            feats[24 * w + j] = band[j]
    return feats

matrix_all_step_new_1 = _word_matrix_192(XJ_1, t_du_1_1, t_step, t_entre_step, fs)
matrix_all_step_new_2 = _word_matrix_192(XJ_1, t_du_2_1, t_step, t_entre_step, fs)
matrix_all_step_new_3 = _word_matrix_192(XJ_1, t_du_3_1, t_step, t_entre_step, fs)
matrix_all_step_new_4 = _word_matrix_192(XJ_1, t_du_4_1, t_step, t_entre_step, fs)
matrix_all_step_new_5 = _word_matrix_192(XJ_1, t_du_5_1, t_step, t_entre_step, fs)

# snapshot the five 'le' vectors before the buffers are reused for 'qui'
le_compare_1 = matrix_all_step_new_1
le_compare_2 = matrix_all_step_new_2
le_compare_3 = matrix_all_step_new_3
le_compare_4 = matrix_all_step_new_4
le_compare_5 = matrix_all_step_new_5
#==============================================================================
# # this is the parti for the 'le' end
#==============================================================================
#==============================================================================
# 'qui': build the five 192-element comparison vectors for word_3
#==============================================================================
wave_signal_float, framerate, word_start_point, word_length_point, word_end_point = \
    read_wav_xml_good_1(filename, filename_1, word_3)
XJ_1 = wave_signal_float
fs = framerate
t_step = 1920        # analysis window length (samples); slices are t_step+1 long
t_entre_step = 1440  # hop between successive windows (samples)
# 1-based start/end sample of each of the five occurrences of the word
t_du_1_1 = int(word_start_point[0]); t_du_1_2 = int(word_end_point[0])
t_du_2_1 = int(word_start_point[1]); t_du_2_2 = int(word_end_point[1])
t_du_3_1 = int(word_start_point[2]); t_du_3_2 = int(word_end_point[2])
t_du_4_1 = int(word_start_point[3]); t_du_4_2 = int(word_end_point[3])
t_du_5_1 = int(word_start_point[4]); t_du_5_2 = int(word_end_point[4])

def _word_matrix_192(signal, t_start, t_step, t_entre_step, fs):
    """Return the 192-element feature vector for one word occurrence.

    Window w (w = 0..7) covers
    signal[t_start + t_entre_step*w - 1 : t_start + t_entre_step*w + t_step],
    i.e. exactly the slices the original unrolled code used (start points are
    1-based, hence the -1; each slice is t_step + 1 samples long).  Each
    window contributes 24 max-normalized bands from matrix_24_2.
    """
    feats = np.zeros([192])
    for w in range(0, 8):
        lo = t_start + t_entre_step * w - 1  # -1: word start points are 1-based
        band, _, _ = matrix_24_2(signal[lo:lo + t_step + 1], fs)
        band = max_matrix_norm(band)
        for j in range(0, 24):
            feats[24 * w + j] = band[j]
    return feats

matrix_all_step_new_1 = _word_matrix_192(XJ_1, t_du_1_1, t_step, t_entre_step, fs)
matrix_all_step_new_2 = _word_matrix_192(XJ_1, t_du_2_1, t_step, t_entre_step, fs)
matrix_all_step_new_3 = _word_matrix_192(XJ_1, t_du_3_1, t_step, t_entre_step, fs)
matrix_all_step_new_4 = _word_matrix_192(XJ_1, t_du_4_1, t_step, t_entre_step, fs)
matrix_all_step_new_5 = _word_matrix_192(XJ_1, t_du_5_1, t_step, t_entre_step, fs)

# snapshot the five 'qui' vectors before the buffers are reused for 'dans'
qui_compare_1 = matrix_all_step_new_1
qui_compare_2 = matrix_all_step_new_2
qui_compare_3 = matrix_all_step_new_3
qui_compare_4 = matrix_all_step_new_4
qui_compare_5 = matrix_all_step_new_5
#==============================================================================
# 'dans': build the five 192-element comparison vectors for word_4
#==============================================================================
wave_signal_float, framerate, word_start_point, word_length_point, word_end_point = \
    read_wav_xml_good_1(filename, filename_1, word_4)
XJ_1 = wave_signal_float
fs = framerate
t_step = 1920        # analysis window length (samples); slices are t_step+1 long
t_entre_step = 1440  # hop between successive windows (samples)
# 1-based start/end sample of each of the five occurrences of the word
t_du_1_1 = int(word_start_point[0]); t_du_1_2 = int(word_end_point[0])
t_du_2_1 = int(word_start_point[1]); t_du_2_2 = int(word_end_point[1])
t_du_3_1 = int(word_start_point[2]); t_du_3_2 = int(word_end_point[2])
t_du_4_1 = int(word_start_point[3]); t_du_4_2 = int(word_end_point[3])
t_du_5_1 = int(word_start_point[4]); t_du_5_2 = int(word_end_point[4])

def _word_matrix_192(signal, t_start, t_step, t_entre_step, fs):
    """Return the 192-element feature vector for one word occurrence.

    Window w (w = 0..7) covers
    signal[t_start + t_entre_step*w - 1 : t_start + t_entre_step*w + t_step],
    i.e. exactly the slices the original unrolled code used (start points are
    1-based, hence the -1; each slice is t_step + 1 samples long).  Each
    window contributes 24 max-normalized bands from matrix_24_2.
    """
    feats = np.zeros([192])
    for w in range(0, 8):
        lo = t_start + t_entre_step * w - 1  # -1: word start points are 1-based
        band, _, _ = matrix_24_2(signal[lo:lo + t_step + 1], fs)
        band = max_matrix_norm(band)
        for j in range(0, 24):
            feats[24 * w + j] = band[j]
    return feats

matrix_all_step_new_1 = _word_matrix_192(XJ_1, t_du_1_1, t_step, t_entre_step, fs)
matrix_all_step_new_2 = _word_matrix_192(XJ_1, t_du_2_1, t_step, t_entre_step, fs)
matrix_all_step_new_3 = _word_matrix_192(XJ_1, t_du_3_1, t_step, t_entre_step, fs)
matrix_all_step_new_4 = _word_matrix_192(XJ_1, t_du_4_1, t_step, t_entre_step, fs)
matrix_all_step_new_5 = _word_matrix_192(XJ_1, t_du_5_1, t_step, t_entre_step, fs)

# snapshot the five 'dans' vectors before the buffers are reused for 'de'
dans_compare_1 = matrix_all_step_new_1
dans_compare_2 = matrix_all_step_new_2
dans_compare_3 = matrix_all_step_new_3
dans_compare_4 = matrix_all_step_new_4
dans_compare_5 = matrix_all_step_new_5
#==============================================================================
# 'de': build the five 192-element comparison vectors for word_5, then save all
#==============================================================================
wave_signal_float, framerate, word_start_point, word_length_point, word_end_point = \
    read_wav_xml_good_1(filename, filename_1, word_5)
XJ_1 = wave_signal_float
fs = framerate
t_step = 1920        # analysis window length (samples); slices are t_step+1 long
t_entre_step = 1440  # hop between successive windows (samples)
# 1-based start/end sample of each of the five occurrences of the word
t_du_1_1 = int(word_start_point[0]); t_du_1_2 = int(word_end_point[0])
t_du_2_1 = int(word_start_point[1]); t_du_2_2 = int(word_end_point[1])
t_du_3_1 = int(word_start_point[2]); t_du_3_2 = int(word_end_point[2])
t_du_4_1 = int(word_start_point[3]); t_du_4_2 = int(word_end_point[3])
t_du_5_1 = int(word_start_point[4]); t_du_5_2 = int(word_end_point[4])

def _word_matrix_192(signal, t_start, t_step, t_entre_step, fs):
    """Return the 192-element feature vector for one word occurrence.

    Window w (w = 0..7) covers
    signal[t_start + t_entre_step*w - 1 : t_start + t_entre_step*w + t_step],
    i.e. exactly the slices the original unrolled code used (start points are
    1-based, hence the -1; each slice is t_step + 1 samples long).  Each
    window contributes 24 max-normalized bands from matrix_24_2.
    """
    feats = np.zeros([192])
    for w in range(0, 8):
        lo = t_start + t_entre_step * w - 1  # -1: word start points are 1-based
        band, _, _ = matrix_24_2(signal[lo:lo + t_step + 1], fs)
        band = max_matrix_norm(band)
        for j in range(0, 24):
            feats[24 * w + j] = band[j]
    return feats

matrix_all_step_new_1 = _word_matrix_192(XJ_1, t_du_1_1, t_step, t_entre_step, fs)
matrix_all_step_new_2 = _word_matrix_192(XJ_1, t_du_2_1, t_step, t_entre_step, fs)
matrix_all_step_new_3 = _word_matrix_192(XJ_1, t_du_3_1, t_step, t_entre_step, fs)
matrix_all_step_new_4 = _word_matrix_192(XJ_1, t_du_4_1, t_step, t_entre_step, fs)
matrix_all_step_new_5 = _word_matrix_192(XJ_1, t_du_5_1, t_step, t_entre_step, fs)

de_compare_1 = matrix_all_step_new_1
de_compare_2 = matrix_all_step_new_2
de_compare_3 = matrix_all_step_new_3
de_compare_4 = matrix_all_step_new_4
de_compare_5 = matrix_all_step_new_5

# parenthesized form prints identically under Python 2 and Python 3
print('finish_part_2')
# persist all 25 comparison vectors (5 words x 5 occurrences); positional
# argument order is part of the archive layout (arr_0..arr_24) and must not
# change for existing readers of this .npz file.
np.savez('je_le_qui_dans_de_192_matrix_compare.npz',je_compare_1,je_compare_2,je_compare_3,je_compare_4,je_compare_5,le_compare_1,le_compare_2,le_compare_3,le_compare_4,le_compare_5,qui_compare_1,qui_compare_2,qui_compare_3,qui_compare_4,qui_compare_5,dans_compare_1,dans_compare_2,dans_compare_3,dans_compare_4,dans_compare_5,de_compare_1,de_compare_2,de_compare_3,de_compare_4,de_compare_5)
| 45.25
| 392
| 0.498539
| 5,029
| 31,494
| 2.679061
| 0.019885
| 0.034513
| 0.042678
| 0.124694
| 0.949826
| 0.945001
| 0.942181
| 0.901432
| 0.899577
| 0.899577
| 0
| 0.065017
| 0.122404
| 31,494
| 696
| 392
| 45.25
| 0.422447
| 0.392773
| 0
| 0.905077
| 0
| 0
| 0.006154
| 0.004775
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.00883
| null | null | 0.002208
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
0f885f32400bdb20dc67b77fc5021d99ed6b859f
| 84
|
py
|
Python
|
frameworks/NODE/__init__.py
|
termit209/automlbenchmark
|
07046564a5fac762a9beae8e77a9a672170873c7
|
[
"MIT"
] | null | null | null |
frameworks/NODE/__init__.py
|
termit209/automlbenchmark
|
07046564a5fac762a9beae8e77a9a672170873c7
|
[
"MIT"
] | 1
|
2021-02-04T11:57:14.000Z
|
2021-02-04T11:59:54.000Z
|
frameworks/NODE/__init__.py
|
termit209/automlbenchmark
|
07046564a5fac762a9beae8e77a9a672170873c7
|
[
"MIT"
] | 2
|
2021-02-04T12:00:49.000Z
|
2021-02-04T12:26:23.000Z
|
def run(*args, **kwargs):
from .exec import run
return run(*args, **kwargs)
| 21
| 31
| 0.619048
| 12
| 84
| 4.333333
| 0.666667
| 0.269231
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.214286
| 84
| 4
| 31
| 21
| 0.787879
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
0e291545e3b5c24d932d94c62743c850a5df89f3
| 1,927
|
py
|
Python
|
recipes-python_st/imx6sx-webserver/addwebserver/Flask_orion/app/xml_parser.py
|
Stefan12321/meta-python_st
|
8ff187963bfd15dbfd6a6c74ae913935be3e6cce
|
[
"MIT"
] | null | null | null |
recipes-python_st/imx6sx-webserver/addwebserver/Flask_orion/app/xml_parser.py
|
Stefan12321/meta-python_st
|
8ff187963bfd15dbfd6a6c74ae913935be3e6cce
|
[
"MIT"
] | null | null | null |
recipes-python_st/imx6sx-webserver/addwebserver/Flask_orion/app/xml_parser.py
|
Stefan12321/meta-python_st
|
8ff187963bfd15dbfd6a6c74ae913935be3e6cce
|
[
"MIT"
] | 1
|
2022-02-08T08:35:31.000Z
|
2022-02-08T08:35:31.000Z
|
import xml.etree.cElementTree as ET
def xml_parser_read(path='/files_61850/KALINA.CID'):
tree = ET.parse(path)
root = tree.getroot()
pref = '{http://www.iec.ch/61850/2003/SCL}'
ip = root.findall(
'{pref}Communication/{pref}SubNetwork/{pref}ConnectedAP/{pref}Address/{pref}P[@type="IP"]'.format(pref=pref))
mask = root.findall(
'{pref}Communication/{pref}SubNetwork/{pref}ConnectedAP/{pref}Address/{pref}P[@type="IP-SUBNET"]'.format(pref=pref))
gate = root.findall(
'{pref}Communication/{pref}SubNetwork/{pref}ConnectedAP/{pref}Address/{pref}P[@type="IP-GATEWAY"]'.format(pref=pref))
ied1 = root.findall('{pref}Communication/{pref}SubNetwork/{pref}ConnectedAP'.format(pref=pref))
ied2 = root.findall('{pref}IED'.format(pref=pref))
# print(ip[0].text, mask[0].text, gate[0].text)
# ip[0].text = "192.168.2.13"
return ip[0].text, mask[0].text, gate[0].text, ied2[0].attrib.get('name')
def xml_parser_write(ip, mask, gate, ied, path='/files_61850/KALINA.CID'):
tree = ET.parse(path)
root = tree.getroot()
pref = '{http://www.iec.ch/61850/2003/SCL}'
ip_path = root.findall('{pref}Communication/{pref}SubNetwork/{pref}ConnectedAP/{pref}Address/{pref}P[@type="IP"]'.format(pref=pref))
mask_path = root.findall('{pref}Communication/{pref}SubNetwork/{pref}ConnectedAP/{pref}Address/{pref}P[@type="IP-SUBNET"]'.format(pref=pref))
gate_path = root.findall('{pref}Communication/{pref}SubNetwork/{pref}ConnectedAP/{pref}Address/{pref}P[@type="IP-GATEWAY"]'.format(pref=pref))
ied1_path = root.findall('{pref}Communication/{pref}SubNetwork/{pref}ConnectedAP'.format(pref=pref))
ied2_path = root.findall('{pref}IED'.format(pref=pref))
ip_path[0].text = ip
mask_path[0].text = mask
gate_path[0].text = gate
ied1_path[0].attrib = {'apName': 'S1', 'iedName': ied}
ied2_path[0].attrib = {'name': ied}
tree.write(path)
| 53.527778
| 146
| 0.679813
| 279
| 1,927
| 4.637993
| 0.200717
| 0.085008
| 0.11592
| 0.173107
| 0.814529
| 0.814529
| 0.814529
| 0.76507
| 0.76507
| 0.723338
| 0
| 0.034058
| 0.116243
| 1,927
| 36
| 147
| 53.527778
| 0.725778
| 0.037883
| 0
| 0.206897
| 0
| 0.206897
| 0.443305
| 0.384449
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068966
| false
| 0
| 0.034483
| 0
| 0.137931
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0e41c7c27bf635315f4fb9ff9b27bbf64aeaed81
| 4,951
|
py
|
Python
|
usaspending_api/disaster/tests/integration/test_disaster_award_amount.py
|
jbuendiallc/usaspending-api
|
f827870cbca4b6a6e16f1c5272bb2ff73a113d76
|
[
"CC0-1.0"
] | 1
|
2020-08-14T04:14:32.000Z
|
2020-08-14T04:14:32.000Z
|
usaspending_api/disaster/tests/integration/test_disaster_award_amount.py
|
jbuendiallc/usaspending-api
|
f827870cbca4b6a6e16f1c5272bb2ff73a113d76
|
[
"CC0-1.0"
] | null | null | null |
usaspending_api/disaster/tests/integration/test_disaster_award_amount.py
|
jbuendiallc/usaspending-api
|
f827870cbca4b6a6e16f1c5272bb2ff73a113d76
|
[
"CC0-1.0"
] | null | null | null |
import pytest
from rest_framework import status
url = "/api/v2/disaster/award/amount/"
@pytest.mark.django_db
def test_award_amount_success(client, monkeypatch, generic_account_data, unlinked_faba_account_data, helpers):
helpers.patch_datetime_now(monkeypatch, 2022, 12, 31)
helpers.reset_dabs_cache()
resp = helpers.post_for_amount_endpoint(client, url, ["L"], ["A", "09", "10"])
assert resp.status_code == status.HTTP_200_OK
assert resp.data["award_count"] == 1
assert resp.data["outlay"] == 222
assert resp.data["obligation"] == 200
resp = helpers.post_for_amount_endpoint(client, url, ["N", "O"], ["A", "07", "08"])
assert resp.status_code == status.HTTP_200_OK
assert resp.data["award_count"] == 2
assert resp.data["outlay"] == 334
assert resp.data["obligation"] == 4
resp = helpers.post_for_amount_endpoint(client, url, ["9"], ["B"])
assert resp.status_code == status.HTTP_200_OK
assert resp.data["award_count"] == 0
assert resp.data["outlay"] == 0
assert resp.data["obligation"] == 0
@pytest.mark.django_db
def test_award_amount_no_award_type_success(
client, monkeypatch, generic_account_data, unlinked_faba_account_data, helpers
):
helpers.patch_datetime_now(monkeypatch, 2022, 12, 31)
resp = helpers.post_for_amount_endpoint(client, url, ["N"], None)
assert resp.status_code == status.HTTP_200_OK
assert resp.data["award_count"] == 4
assert resp.data["outlay"] == 10890108.00
assert resp.data["obligation"] == 1088898.00
resp = helpers.post_for_amount_endpoint(client, url, ["L", "M", "N", "O", "P"], None)
assert resp.status_code == status.HTTP_200_OK
assert resp.data["award_count"] == 7
assert resp.data["outlay"] == 10890997.00
assert resp.data["obligation"] == 1089204.00
@pytest.mark.django_db
def test_award_amount_on_sum_non_zero_toa(client, monkeypatch, multiple_file_c_to_same_award, helpers):
helpers.patch_datetime_now(monkeypatch, 2022, 12, 31)
helpers.reset_dabs_cache()
resp = helpers.post_for_amount_endpoint(client, url, ["M"], None)
assert resp.status_code == status.HTTP_200_OK
assert resp.data["award_count"] == 1
assert resp.data["outlay"] == 0.0
assert resp.data["obligation"] == 14.0
@pytest.mark.django_db
def test_award_amount_on_sum_non_zero_outlay(client, monkeypatch, multiple_outlay_file_c_to_same_award, helpers):
helpers.patch_datetime_now(monkeypatch, 2022, 12, 31)
helpers.reset_dabs_cache()
resp = helpers.post_for_amount_endpoint(client, url, ["M"], None)
assert resp.status_code == status.HTTP_200_OK
assert resp.data["award_count"] == 1
assert resp.data["outlay"] == 14.0
assert resp.data["obligation"] == 0.0
@pytest.mark.django_db
def test_award_amount_on_sum_zero_toa(client, monkeypatch, multiple_file_c_to_same_award_that_cancel_out, helpers):
helpers.patch_datetime_now(monkeypatch, 2022, 12, 31)
helpers.reset_dabs_cache()
resp = helpers.post_for_amount_endpoint(client, url, ["M"], None)
assert resp.status_code == status.HTTP_200_OK
assert resp.data["award_count"] == 0
assert resp.data["outlay"] == 0.0
assert resp.data["obligation"] == 0.0
@pytest.mark.django_db
def test_award_amount_invalid_defc(client, monkeypatch, generic_account_data, helpers):
helpers.patch_datetime_now(monkeypatch, 2022, 12, 31)
resp = helpers.post_for_amount_endpoint(client, url, ["ZZ"], ["A", "09", "10"])
assert resp.status_code == status.HTTP_400_BAD_REQUEST
assert resp.data["detail"] == "Field 'filter|def_codes' is outside valid values ['9', 'A', 'L', 'M', 'N', 'O', 'P']"
@pytest.mark.django_db
def test_award_amount_exclusive_filters(client, generic_account_data, helpers):
    """Supplying both award_type_codes and award_type is unprocessable (422)."""
    response = helpers.post_for_amount_endpoint(
        client, url, ["M"], award_type_codes=["A", "09", "10"], award_type="procurement"
    )
    assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY
@pytest.mark.django_db
def test_award_amount_bad_award_type_value(client, helpers):
    """An award_type outside the allowed values is rejected with a 400."""
    response = helpers.post_for_amount_endpoint(client, url, ["ZZ"], award_type="financial")
    assert response.status_code == status.HTTP_400_BAD_REQUEST
@pytest.mark.django_db
def test_award_type_filters(client, monkeypatch, generic_account_data, unlinked_faba_account_data, helpers):
    """Totals are restricted by the award_type filter (procurement vs assistance)."""
    helpers.patch_datetime_now(monkeypatch, 2022, 12, 31)
    helpers.reset_dabs_cache()
    cases = [
        (["L"], "procurement", {"award_count": 2, "outlay": 777, "obligation": 205}),
        (["N", "O"], "assistance", {"award_count": 3, "outlay": 1222, "obligation": 12}),
    ]
    for def_codes, award_type, expected in cases:
        resp = helpers.post_for_amount_endpoint(client, url, def_codes, award_type=award_type)
        assert resp.status_code == status.HTTP_200_OK
        for field, value in expected.items():
            assert resp.data[field] == value
| 39.608
| 120
| 0.722278
| 715
| 4,951
| 4.699301
| 0.160839
| 0.130952
| 0.129167
| 0.069643
| 0.827679
| 0.802381
| 0.795238
| 0.78631
| 0.741964
| 0.63869
| 0
| 0.047081
| 0.141992
| 4,951
| 124
| 121
| 39.927419
| 0.743879
| 0
| 0
| 0.5
| 0
| 0.010638
| 0.095536
| 0.006059
| 0
| 0
| 0
| 0
| 0.468085
| 1
| 0.095745
| false
| 0
| 0.021277
| 0
| 0.117021
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0e43b04c38d9bdc7a00d6636f52199eb19ef6669
| 2,592
|
py
|
Python
|
src/utils/batch_cpu_insert.py
|
Metroynome/robo
|
78c389decce98d0d1e4e4e02ccbfcba7b465209c
|
[
"MIT"
] | null | null | null |
src/utils/batch_cpu_insert.py
|
Metroynome/robo
|
78c389decce98d0d1e4e4e02ccbfcba7b465209c
|
[
"MIT"
] | null | null | null |
src/utils/batch_cpu_insert.py
|
Metroynome/robo
|
78c389decce98d0d1e4e4e02ccbfcba7b465209c
|
[
"MIT"
] | null | null | null |
import random
import secrets
import sqlite3
# Number of CPU accounts to create; the loop at the bottom of the file runs
# range(1, n_cpus), so 998 users are actually inserted.
n_cpus = 999
db_file = "../../logs/database.db"
# Build an SQLite URI with mode "rwc" (read/write, create if missing).
db_file = "file:" + db_file + "?mode=" + "rwc"
print("Using DB: {}".format(db_file))
# This will raise an error if it can't connect
conn = sqlite3.connect(db_file, uri=True, check_same_thread=False)
# Default binary blobs (hex-encoded) used for every new user's stats columns.
# Presumably the game's initial stats layout -- TODO confirm against schema.
default_stats = '00C0A84400C0A84400C0A84400C0A8440000AF430000AF430000AF430000AF430000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'
default_ladderstatswide = '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'
def generate_session_key() -> bytes:
    """Return a random session key as bytes: 16 uppercase-hex chars plus a NUL.

    Uses the ``secrets`` module instead of ``random`` because session keys are
    security-sensitive tokens; ``random`` is not cryptographically secure.
    The output format (17 bytes, NUL-terminated uppercase hex) is unchanged.
    """
    new_session_key = ''.join(secrets.choice('0123456789ABCDEF') for _ in range(16)) + '\0'
    return new_session_key.encode()
def _create_new_user(username: str, encrypted_password: str, session_key: bytes):
    """Insert a single user row with default stats into the `users` table.

    Args:
        username: Login name for the new account.
        encrypted_password: Already-encrypted password (may be empty for CPUs).
        session_key: NUL-terminated session key bytes; stored decoded as text.

    The cursor is closed in a ``finally`` block so it is released even when the
    INSERT raises (the original code leaked the cursor on failure).
    """
    insert_command = """INSERT INTO users
    (account_type, username, password, session_key, stats, ladderstatswide)
    values(?,?,?,?,?,?);
    """
    account_type = 2  # presumably the CPU/bot account type -- TODO confirm
    stats = default_stats
    ladderstatswide = default_ladderstatswide
    c = conn.cursor()
    try:
        c.execute(insert_command, [account_type, username, encrypted_password, session_key.decode(), stats, ladderstatswide])
        conn.commit()
    finally:
        c.close()
    print(f"Created new user: {username}")
# Create users CPU-001 .. CPU-998 (range's upper bound is exclusive, so
# n_cpus itself is never reached) with empty passwords and fresh session keys.
for i in range(1, n_cpus):
    _create_new_user(f"CPU-{str(i).zfill(3)}", "", generate_session_key())
| 66.461538
| 829
| 0.824846
| 162
| 2,592
| 12.944444
| 0.493827
| 0.042918
| 0.024797
| 0.010491
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.570871
| 0.109954
| 2,592
| 38
| 830
| 68.210526
| 0.338101
| 0.016975
| 0
| 0
| 0
| 0
| 0.641946
| 0.540271
| 0.035714
| 1
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0.107143
| 0.071429
| 0
| 0.178571
| 0.071429
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
7ef0ddd65f9d900e2aa0d797937d9c580d6ad255
| 33
|
py
|
Python
|
server/blueprints/admin_home/__init__.py
|
prakashsellathurai/fashion-Image-Recommender
|
e0f481133f19e3b8f7f45bf6fc97e39ea866d050
|
[
"MIT"
] | 2
|
2021-07-31T14:01:01.000Z
|
2021-08-01T11:09:57.000Z
|
server/blueprints/admin_home/__init__.py
|
prakashsellathurai/fashion-Image-Recommender
|
e0f481133f19e3b8f7f45bf6fc97e39ea866d050
|
[
"MIT"
] | 4
|
2021-04-30T21:36:10.000Z
|
2021-11-10T19:58:49.000Z
|
server/blueprints/admin_home/__init__.py
|
prakashsellathurai/fashion-Image-Recommender
|
e0f481133f19e3b8f7f45bf6fc97e39ea866d050
|
[
"MIT"
] | null | null | null |
from .blueprint import admin_home
| 33
| 33
| 0.878788
| 5
| 33
| 5.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 33
| 1
| 33
| 33
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
7efdb5e041e5ac7a51dad9560ec80c85ac2503ba
| 190
|
py
|
Python
|
test/integration/samples_in/two_liner.py
|
Inveracity/flynt
|
b975b6f61893d5db1114d68fbb5d212c4e11aeb8
|
[
"MIT"
] | 487
|
2019-06-10T17:44:56.000Z
|
2022-03-26T01:28:19.000Z
|
test/integration/samples_in/two_liner.py
|
Inveracity/flynt
|
b975b6f61893d5db1114d68fbb5d212c4e11aeb8
|
[
"MIT"
] | 118
|
2019-07-03T12:26:39.000Z
|
2022-03-06T22:40:17.000Z
|
test/integration/samples_in/two_liner.py
|
Inveracity/flynt
|
b975b6f61893d5db1114d68fbb5d212c4e11aeb8
|
[
"MIT"
] | 25
|
2019-07-10T08:39:58.000Z
|
2022-03-03T14:44:15.000Z
|
# Integration-test sample: these .format() calls are intentional inputs for a
# string-formatting converter -- do not rewrite them to f-strings here.
x, y = 1, 2
a = ('line 1 {}'
     ' line 2 {}'.format(x, y))
b = ('line 1234567890 1234567890 1234567890 1234567890 {}'
     ' 1234567890 1234567890 1234567890 1234567890 {}'.format(x, y))
| 31.666667
| 69
| 0.610526
| 25
| 190
| 4.64
| 0.36
| 1.206897
| 1.551724
| 1.724138
| 0.689655
| 0.689655
| 0.689655
| 0
| 0
| 0
| 0
| 0.563758
| 0.215789
| 190
| 6
| 69
| 31.666667
| 0.214765
| 0
| 0
| 0
| 0
| 0
| 0.617801
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7d441d1b467697a91e698e563ebd6b0769e7f99f
| 26,551
|
py
|
Python
|
tests/test_packages/test_connections/test_http_server/test_http_server.py
|
lrahmani/agents-aea
|
9bd1d51530fc21bf41b5adea031cda19a94b048b
|
[
"Apache-2.0"
] | null | null | null |
tests/test_packages/test_connections/test_http_server/test_http_server.py
|
lrahmani/agents-aea
|
9bd1d51530fc21bf41b5adea031cda19a94b048b
|
[
"Apache-2.0"
] | null | null | null |
tests/test_packages/test_connections/test_http_server/test_http_server.py
|
lrahmani/agents-aea
|
9bd1d51530fc21bf41b5adea031cda19a94b048b
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2018-2019 Fetch.AI Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""This module contains the tests of the gym connection module."""
import asyncio
import concurrent.futures
import functools
import http.client
import logging
import os
from threading import Thread
from typing import Dict, Tuple, cast
import pytest
from aea.configurations.base import PublicId
from aea.mail.base import Envelope
from packages.fetchai.connections.http_server.connection import HTTPServerConnection
from packages.fetchai.protocols.http.message import HttpMessage
from packages.fetchai.protocols.http.serialization import HttpSerializer
from ....conftest import (
ROOT_DIR,
get_host,
get_unused_tcp_port,
)
logger = logging.getLogger(__name__)
@pytest.mark.asyncio
class TestHTTPServerConnectionConnectDisconnect:
    """Test connecting and disconnecting the HTTP server connection channel."""
    @classmethod
    def setup_class(cls):
        """Initialise the class and test connect."""
        cls.address = "my_key"
        cls.host = get_host()
        cls.port = get_unused_tcp_port()
        cls.api_spec_path = os.path.join(ROOT_DIR, "tests", "data", "petstore_sim.yaml")
        cls.connection_id = PublicId("fetchai", "http_server", "0.1.0")
        cls.protocol_id = PublicId("fetchai", "http", "0.1.0")
        cls.http_connection = HTTPServerConnection(
            address=cls.address,
            host=cls.host,
            port=cls.port,
            api_spec_path=cls.api_spec_path,
            connection_id=cls.connection_id,
            restricted_to_protocols=set([cls.protocol_id]),
        )
        # The channel starts stopped; connect() must flip that flag.
        assert cls.http_connection.channel.is_stopped
        cls.http_connection.channel.connect()
        assert not cls.http_connection.channel.is_stopped
    @pytest.mark.asyncio
    async def test_http_connection_disconnect_channel(self):
        """Test the disconnect: the channel reports stopped again afterwards."""
        self.http_connection.channel.disconnect()
        assert self.http_connection.channel.is_stopped
@pytest.mark.asyncio
class TestHTTPServerConnectionSend:
    """Test sending envelopes through the HTTP server connection."""
    @classmethod
    def setup_class(cls):
        """Initialise the class and connect on the current event loop."""
        cls.address = "my_key"
        cls.host = get_host()
        cls.port = get_unused_tcp_port()
        cls.api_spec_path = os.path.join(ROOT_DIR, "tests", "data", "petstore_sim.yaml")
        cls.connection_id = PublicId("fetchai", "http_server", "0.1.0")
        cls.protocol_id = PublicId("fetchai", "http", "0.1.0")
        cls.http_connection = HTTPServerConnection(
            address=cls.address,
            host=cls.host,
            port=cls.port,
            api_spec_path=cls.api_spec_path,
            connection_id=cls.connection_id,
            restricted_to_protocols=set([cls.protocol_id]),
        )
        loop = asyncio.get_event_loop()
        value = loop.run_until_complete(cls.http_connection.connect())
        assert value is None
        assert cls.http_connection.connection_status.is_connected
    @pytest.mark.asyncio
    async def test_send_connection_drop(self):
        """A response with no matching pending request id is silently dropped."""
        client_id = "to_key"
        message = HttpMessage(
            performative=HttpMessage.Performative.RESPONSE,
            dialogue_reference=("", ""),
            target=1,
            message_id=2,
            headers="",
            version="",
            status_code=200,
            status_text="Success",
            bodyy=b"",
        )
        envelope = Envelope(
            to=client_id,
            sender="from_key",
            protocol_id=self.protocol_id,
            message=HttpSerializer().encode(message),
        )
        await self.http_connection.send(envelope)
        # we expect the envelope to be dropped
        assert (
            self.http_connection.channel.dispatch_ready_envelopes.get(client_id) is None
        )
    @pytest.mark.asyncio
    async def test_send_connection_send(self):
        """With a matching pending request id, the envelope is queued for dispatch."""
        client_id = "to_key"
        message = HttpMessage(
            performative=HttpMessage.Performative.RESPONSE,
            dialogue_reference=("", ""),
            target=1,
            message_id=2,
            headers="",
            version="",
            status_code=200,
            status_text="Success",
            bodyy=b"",
        )
        envelope = Envelope(
            to=client_id,
            sender="from_key",
            protocol_id=self.protocol_id,
            message=HttpSerializer().encode(message),
        )
        # Register a pending request so send() routes instead of dropping.
        self.http_connection.channel.pending_request_ids.add("to_key")
        await self.http_connection.send(envelope)
        assert (
            self.http_connection.channel.dispatch_ready_envelopes.get(client_id)
            == envelope
        )
        # The pending id is consumed by the send.
        assert self.http_connection.channel.pending_request_ids == set()
        # clean up:
        self.http_connection.channel.dispatch_ready_envelopes = (
            {}
        )  # type: Dict[str, Envelope]
    @classmethod
    def teardown_class(cls):
        """Teardown the class: disconnect on the same loop used by setup."""
        loop = asyncio.get_event_loop()
        value = loop.run_until_complete(cls.http_connection.disconnect())
        assert value is None
@pytest.mark.asyncio
class TestHTTPServerConnectionGET404:
    """A GET to a route outside the API spec must answer 404."""
    @classmethod
    def setup_class(cls):
        """Initialise the class."""
        cls.address = "my_key"
        cls.host = get_host()
        cls.port = get_unused_tcp_port()
        cls.api_spec_path = os.path.join(ROOT_DIR, "tests", "data", "petstore_sim.yaml")
        cls.connection_id = PublicId("fetchai", "http_server", "0.1.0")
        cls.protocol_id = PublicId("fetchai", "http", "0.1.0")
        cls.http_connection = HTTPServerConnection(
            address=cls.address,
            host=cls.host,
            port=cls.port,
            api_spec_path=cls.api_spec_path,
            connection_id=cls.connection_id,
            restricted_to_protocols=set([cls.protocol_id]),
        )
        # Run the connection on a dedicated loop in a background thread so the
        # test can make blocking HTTP client calls against it.
        cls.loop = asyncio.new_event_loop()
        # cls.loop.set_debug(enabled=True)
        cls.http_connection.loop = cls.loop
        value = cls.loop.run_until_complete(cls.http_connection.connect())
        assert value is None
        assert cls.http_connection.connection_status.is_connected
        assert not cls.http_connection.channel.is_stopped
        cls.t = Thread(target=cls.loop.run_forever)
        cls.t.start()
    @pytest.mark.asyncio
    async def test_get_404(self):
        """Test send get request w/ 404 response."""
        def request_response_cycle(host, port) -> Tuple[int, str, bytes]:
            # Blocking HTTP client call; executed in a worker thread below.
            conn = http.client.HTTPConnection(host, port)
            conn.request("GET", "/")
            response = conn.getresponse()
            return response.status, response.reason, response.read()
        async def client_thread(host, port) -> Tuple[int, str, bytes]:
            # Offload the blocking request so this coroutine's loop stays free.
            executor = concurrent.futures.ThreadPoolExecutor(max_workers=3)
            loop = asyncio.get_event_loop()
            result = await loop.run_in_executor(
                executor,
                functools.partial(request_response_cycle, host=host, port=port),
            )
            return result
        response_status_code, response_status_text, response_body = await client_thread(
            self.host, self.port
        )
        # "/" is not in the petstore spec, so the server answers 404.
        assert (
            response_status_code == 404
            and response_status_text == "Request Not Found"
            and response_body == b""
        )
    @classmethod
    def teardown_class(cls):
        """Teardown the class: stop the loop, join the thread, disconnect."""
        cls.loop.call_soon_threadsafe(cls.loop.stop)
        cls.t.join()
        value = cls.loop.run_until_complete(cls.http_connection.disconnect())
        assert value is None
@pytest.mark.asyncio
class TestHTTPServerConnectionGET408:
    """A GET that the agent never answers must time out with 408."""
    @classmethod
    def setup_class(cls):
        """Initialise the class."""
        cls.address = "my_key"
        cls.host = get_host()
        cls.port = get_unused_tcp_port()
        cls.api_spec_path = os.path.join(ROOT_DIR, "tests", "data", "petstore_sim.yaml")
        cls.connection_id = PublicId("fetchai", "http_server", "0.1.0")
        cls.protocol_id = PublicId("fetchai", "http", "0.1.0")
        cls.http_connection = HTTPServerConnection(
            address=cls.address,
            host=cls.host,
            port=cls.port,
            api_spec_path=cls.api_spec_path,
            connection_id=cls.connection_id,
            restricted_to_protocols=set([cls.protocol_id]),
        )
        # Dedicated loop in a background thread, as in the other server tests.
        cls.loop = asyncio.new_event_loop()
        # cls.loop.set_debug(enabled=True)
        cls.http_connection.loop = cls.loop
        value = cls.loop.run_until_complete(cls.http_connection.connect())
        assert value is None
        assert cls.http_connection.connection_status.is_connected
        assert not cls.http_connection.channel.is_stopped
        cls.t = Thread(target=cls.loop.run_forever)
        cls.t.start()
    @pytest.mark.asyncio
    async def test_get_408(self):
        """Test send get request w/ 408 response."""
        def request_response_cycle(host, port) -> Tuple[int, str, bytes]:
            # Blocking HTTP client call; executed in a worker thread below.
            conn = http.client.HTTPConnection(host, port)
            conn.request("GET", "/pets")
            response = conn.getresponse()
            return response.status, response.reason, response.read()
        async def client_thread(host, port) -> Tuple[int, str, bytes]:
            executor = concurrent.futures.ThreadPoolExecutor(max_workers=3)
            loop = asyncio.get_event_loop()
            result = await loop.run_in_executor(
                executor,
                functools.partial(request_response_cycle, host=host, port=port),
            )
            return result
        async def agent_processing(http_connection, address) -> bool:
            # we block here to give it some time for the envelope to make it to the queue
            # (8s deliberately exceeds the server's request timeout, forcing 408).
            await asyncio.sleep(8)
            envelope = await http_connection.receive()
            is_exiting_correctly = (
                envelope is not None
                and envelope.to == address
                and len(http_connection.channel.timed_out_request_ids) == 1
            )
            return is_exiting_correctly
        # Run the client request and the (too slow) agent side concurrently.
        client_task = asyncio.ensure_future(client_thread(self.host, self.port))
        agent_task = asyncio.ensure_future(
            agent_processing(self.http_connection, self.address)
        )
        await asyncio.gather(client_task, agent_task)
        response_status_code, response_status_text, response_body = client_task.result()
        is_exiting_correctly = agent_task.result()
        assert (
            response_status_code == 408
            and response_status_text == "Request Timeout"
            and response_body == b""
        )
        assert is_exiting_correctly
    @classmethod
    def teardown_class(cls):
        """Teardown the class: stop the loop, join the thread, disconnect."""
        cls.loop.call_soon_threadsafe(cls.loop.stop)
        cls.t.join()
        value = cls.loop.run_until_complete(cls.http_connection.disconnect())
        assert value is None
@pytest.mark.asyncio
class TestHTTPServerConnectionGET200:
    """A GET answered promptly by the agent must return the agent's 200 response."""
    @classmethod
    def setup_class(cls):
        """Initialise the class."""
        cls.address = "my_key"
        cls.host = get_host()
        cls.port = get_unused_tcp_port()
        cls.api_spec_path = os.path.join(ROOT_DIR, "tests", "data", "petstore_sim.yaml")
        cls.connection_id = PublicId("fetchai", "http_server", "0.1.0")
        cls.protocol_id = PublicId("fetchai", "http", "0.1.0")
        cls.http_connection = HTTPServerConnection(
            address=cls.address,
            host=cls.host,
            port=cls.port,
            api_spec_path=cls.api_spec_path,
            connection_id=cls.connection_id,
            restricted_to_protocols=set([cls.protocol_id]),
        )
        # Dedicated loop in a background thread, as in the other server tests.
        cls.loop = asyncio.new_event_loop()
        # cls.loop.set_debug(enabled=True)
        cls.http_connection.loop = cls.loop
        value = cls.loop.run_until_complete(cls.http_connection.connect())
        assert value is None
        assert cls.http_connection.connection_status.is_connected
        assert not cls.http_connection.channel.is_stopped
        cls.t = Thread(target=cls.loop.run_forever)
        cls.t.start()
    @pytest.mark.asyncio
    async def test_get_200(self):
        """Test send get request w/ 200 response."""
        def request_response_cycle(host, port) -> Tuple[int, str, bytes]:
            # Blocking HTTP client call; executed in a worker thread below.
            conn = http.client.HTTPConnection(host, port)
            conn.request("GET", "/pets")
            response = conn.getresponse()
            return response.status, response.reason, response.read()
        async def client_thread(host, port) -> Tuple[int, str, bytes]:
            executor = concurrent.futures.ThreadPoolExecutor(max_workers=3)
            loop = asyncio.get_event_loop()
            result = await loop.run_in_executor(
                executor,
                functools.partial(request_response_cycle, host=host, port=port),
            )
            return result
        async def agent_processing(http_connection) -> bool:
            # we block here to give it some time for the envelope to make it to the queue
            await asyncio.sleep(1)
            envelope = await http_connection.receive()
            if envelope is not None:
                incoming_message = cast(
                    HttpMessage, HttpSerializer().decode(envelope.message)
                )
                # Build a 200 response echoing the request's version/headers
                # and targeting the incoming message id.
                message = HttpMessage(
                    performative=HttpMessage.Performative.RESPONSE,
                    dialogue_reference=("", ""),
                    target=incoming_message.message_id,
                    message_id=incoming_message.message_id + 1,
                    version=incoming_message.version,
                    headers=incoming_message.headers,
                    status_code=200,
                    status_text="Success",
                    bodyy=b"Response body",
                )
                response_envelope = Envelope(
                    to=envelope.sender,
                    sender=envelope.to,
                    protocol_id=envelope.protocol_id,
                    context=envelope.context,
                    message=HttpSerializer().encode(message),
                )
                await http_connection.send(response_envelope)
                is_exiting_correctly = True
            else:
                is_exiting_correctly = False
            return is_exiting_correctly
        # Run the client request and the responding agent side concurrently.
        client_task = asyncio.ensure_future(client_thread(self.host, self.port))
        agent_task = asyncio.ensure_future(agent_processing(self.http_connection))
        await asyncio.gather(client_task, agent_task)
        response_status_code, response_status_text, response_body = client_task.result()
        is_exiting_correctly = agent_task.result()
        assert (
            response_status_code == 200
            and response_status_text == "Success"
            and response_body == b"Response body"
        )
        assert is_exiting_correctly
    @classmethod
    def teardown_class(cls):
        """Teardown the class: stop the loop, join the thread, disconnect."""
        cls.loop.call_soon_threadsafe(cls.loop.stop)
        cls.t.join()
        value = cls.loop.run_until_complete(cls.http_connection.disconnect())
        assert value is None
@pytest.mark.asyncio
class TestHTTPServerConnectionPOST404:
    """A POST to a route outside the API spec must answer 404."""
    @classmethod
    def setup_class(cls):
        """Initialise the class."""
        cls.address = "my_key"
        cls.host = get_host()
        cls.port = get_unused_tcp_port()
        cls.api_spec_path = os.path.join(ROOT_DIR, "tests", "data", "petstore_sim.yaml")
        cls.connection_id = PublicId("fetchai", "http_server", "0.1.0")
        cls.protocol_id = PublicId("fetchai", "http", "0.1.0")
        cls.http_connection = HTTPServerConnection(
            address=cls.address,
            host=cls.host,
            port=cls.port,
            api_spec_path=cls.api_spec_path,
            connection_id=cls.connection_id,
            restricted_to_protocols=set([cls.protocol_id]),
        )
        # Dedicated loop in a background thread, as in the other server tests.
        cls.loop = asyncio.new_event_loop()
        cls.http_connection.loop = cls.loop
        value = cls.loop.run_until_complete(cls.http_connection.connect())
        assert value is None
        assert cls.http_connection.connection_status.is_connected
        assert not cls.http_connection.channel.is_stopped
        cls.t = Thread(target=cls.loop.run_forever)
        cls.t.start()
    @pytest.mark.asyncio
    async def test_post_404(self):
        """Test send post request w/ 404 response."""
        def request_response_cycle(host, port):
            # Blocking HTTP client call; executed in a worker thread below.
            conn = http.client.HTTPConnection(host, port)
            body = "some body"
            conn.request("POST", "/", body)
            response = conn.getresponse()
            return response.status, response.reason, response.read()
        async def client_thread(host, port):
            executor = concurrent.futures.ThreadPoolExecutor(max_workers=3)
            loop = asyncio.get_event_loop()
            result = await loop.run_in_executor(
                executor,
                functools.partial(request_response_cycle, host=host, port=port),
            )
            return result
        response_status_code, response_status_text, response_body = await client_thread(
            self.host, self.port
        )
        # "/" is not in the petstore spec, so the server answers 404.
        assert (
            response_status_code == 404
            and response_status_text == "Request Not Found"
            and response_body == b""
        )
    @classmethod
    def teardown_class(cls):
        """Teardown the class: stop the loop, join the thread, disconnect."""
        cls.loop.call_soon_threadsafe(cls.loop.stop)
        cls.t.join()
        value = cls.loop.run_until_complete(cls.http_connection.disconnect())
        assert value is None
@pytest.mark.asyncio
class TestHTTPServerConnectionPOST408:
    """A POST that the agent never answers must time out with 408."""
    @classmethod
    def setup_class(cls):
        """Initialise the class."""
        cls.address = "my_key"
        cls.host = get_host()
        cls.port = get_unused_tcp_port()
        cls.api_spec_path = os.path.join(ROOT_DIR, "tests", "data", "petstore_sim.yaml")
        cls.connection_id = PublicId("fetchai", "http_server", "0.1.0")
        cls.protocol_id = PublicId("fetchai", "http", "0.1.0")
        cls.http_connection = HTTPServerConnection(
            address=cls.address,
            host=cls.host,
            port=cls.port,
            api_spec_path=cls.api_spec_path,
            connection_id=cls.connection_id,
            restricted_to_protocols=set([cls.protocol_id]),
        )
        # Dedicated loop in a background thread, as in the other server tests.
        cls.loop = asyncio.new_event_loop()
        cls.http_connection.loop = cls.loop
        value = cls.loop.run_until_complete(cls.http_connection.connect())
        assert value is None
        assert cls.http_connection.connection_status.is_connected
        assert not cls.http_connection.channel.is_stopped
        cls.t = Thread(target=cls.loop.run_forever)
        cls.t.start()
    @pytest.mark.asyncio
    async def test_post_408(self):
        """Test send post request w/ 408 response."""
        def request_response_cycle(host, port):
            # Blocking HTTP client call; executed in a worker thread below.
            conn = http.client.HTTPConnection(host, port)
            body = "some body"
            conn.request("POST", "/pets", body)
            response = conn.getresponse()
            return response.status, response.reason, response.read()
        async def client_thread(host, port):
            executor = concurrent.futures.ThreadPoolExecutor(max_workers=3)
            loop = asyncio.get_event_loop()
            result = await loop.run_in_executor(
                executor,
                functools.partial(request_response_cycle, host=host, port=port),
            )
            return result
        async def agent_processing(http_connection, address) -> bool:
            # we block here to give it some time for the envelope to make it to the queue
            # (8s deliberately exceeds the server's request timeout, forcing 408).
            await asyncio.sleep(8)
            envelope = await http_connection.receive()
            is_exiting_correctly = (
                envelope is not None
                and envelope.to == address
                and len(http_connection.channel.timed_out_request_ids) == 1
            )
            return is_exiting_correctly
        # Run the client request and the (too slow) agent side concurrently.
        client_task = asyncio.ensure_future(client_thread(self.host, self.port))
        agent_task = asyncio.ensure_future(
            agent_processing(self.http_connection, self.address)
        )
        await asyncio.gather(client_task, agent_task)
        response_status_code, response_status_text, response_body = client_task.result()
        is_exiting_correctly = agent_task.result()
        assert (
            response_status_code == 408
            and response_status_text == "Request Timeout"
            and response_body == b""
        )
        assert is_exiting_correctly
    @classmethod
    def teardown_class(cls):
        """Teardown the class: stop the loop, join the thread, disconnect."""
        cls.loop.call_soon_threadsafe(cls.loop.stop)
        cls.t.join()
        value = cls.loop.run_until_complete(cls.http_connection.disconnect())
        assert value is None
@pytest.mark.asyncio
class TestHTTPServerConnectionPOST201:
    """A POST answered promptly by the agent must return the agent's 201 response."""
    @classmethod
    def setup_class(cls):
        """Initialise the class."""
        cls.address = "my_key"
        cls.host = get_host()
        cls.port = get_unused_tcp_port()
        cls.api_spec_path = os.path.join(ROOT_DIR, "tests", "data", "petstore_sim.yaml")
        cls.connection_id = PublicId("fetchai", "http_server", "0.1.0")
        cls.protocol_id = PublicId("fetchai", "http", "0.1.0")
        cls.http_connection = HTTPServerConnection(
            address=cls.address,
            host=cls.host,
            port=cls.port,
            api_spec_path=cls.api_spec_path,
            connection_id=cls.connection_id,
            restricted_to_protocols=set([cls.protocol_id]),
        )
        # Dedicated loop in a background thread, as in the other server tests.
        cls.loop = asyncio.new_event_loop()
        cls.http_connection.loop = cls.loop
        value = cls.loop.run_until_complete(cls.http_connection.connect())
        assert value is None
        assert cls.http_connection.connection_status.is_connected
        assert not cls.http_connection.channel.is_stopped
        cls.t = Thread(target=cls.loop.run_forever)
        cls.t.start()
    @pytest.mark.asyncio
    async def test_post_201(self):
        """Test send post request w/ 201 response."""
        def request_response_cycle(host, port) -> Tuple[int, str, bytes]:
            # Blocking HTTP client call; executed in a worker thread below.
            conn = http.client.HTTPConnection(host, port)
            conn.request("POST", "/pets")
            response = conn.getresponse()
            return response.status, response.reason, response.read()
        async def client_thread(host, port) -> Tuple[int, str, bytes]:
            executor = concurrent.futures.ThreadPoolExecutor(max_workers=3)
            loop = asyncio.get_event_loop()
            result = await loop.run_in_executor(
                executor,
                functools.partial(request_response_cycle, host=host, port=port),
            )
            return result
        async def agent_processing(http_connection) -> bool:
            # we block here to give it some time for the envelope to make it to the queue
            await asyncio.sleep(1)
            envelope = await http_connection.receive()
            if envelope is not None:
                incoming_message = cast(
                    HttpMessage, HttpSerializer().decode(envelope.message)
                )
                # Build a 201 response echoing the request's version/headers
                # and targeting the incoming message id.
                message = HttpMessage(
                    performative=HttpMessage.Performative.RESPONSE,
                    dialogue_reference=("", ""),
                    target=incoming_message.message_id,
                    message_id=incoming_message.message_id + 1,
                    version=incoming_message.version,
                    headers=incoming_message.headers,
                    status_code=201,
                    status_text="Created",
                    bodyy=b"Response body",
                )
                response_envelope = Envelope(
                    to=envelope.sender,
                    sender=envelope.to,
                    protocol_id=envelope.protocol_id,
                    context=envelope.context,
                    message=HttpSerializer().encode(message),
                )
                await http_connection.send(response_envelope)
                is_exiting_correctly = True
            else:
                is_exiting_correctly = False
            return is_exiting_correctly
        # Run the client request and the responding agent side concurrently.
        client_task = asyncio.ensure_future(client_thread(self.host, self.port))
        agent_task = asyncio.ensure_future(agent_processing(self.http_connection))
        await asyncio.gather(client_task, agent_task)
        response_status_code, response_status_text, response_body = client_task.result()
        is_exiting_correctly = agent_task.result()
        assert (
            response_status_code == 201
            and response_status_text == "Created"
            and response_body == b"Response body"
        )
        assert is_exiting_correctly
    @classmethod
    def teardown_class(cls):
        """Teardown the class: stop the loop, join the thread, disconnect."""
        cls.loop.call_soon_threadsafe(cls.loop.stop)
        cls.t.join()
        value = cls.loop.run_until_complete(cls.http_connection.disconnect())
        assert value is None
| 37.134266
| 89
| 0.616851
| 3,006
| 26,551
| 5.234531
| 0.087824
| 0.069399
| 0.047537
| 0.014236
| 0.902574
| 0.89749
| 0.886432
| 0.875818
| 0.859422
| 0.859422
| 0
| 0.008531
| 0.280366
| 26,551
| 714
| 90
| 37.186275
| 0.814989
| 0.078528
| 0
| 0.820652
| 0
| 0
| 0.034268
| 0
| 0
| 0
| 0
| 0
| 0.077899
| 1
| 0.038043
| false
| 0
| 0.027174
| 0
| 0.108696
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cb2e8dd584901034c7921a00bc577f21dedc3f4d
| 1,461
|
py
|
Python
|
align_rudder/envs/utils.py
|
ml-jku/align-rudder
|
26cf4b62a713e180063cefc2921981484ebb9165
|
[
"MIT"
] | 12
|
2020-09-30T08:15:44.000Z
|
2021-12-22T03:36:33.000Z
|
align_rudder/envs/utils.py
|
ml-jku/align-rudder
|
26cf4b62a713e180063cefc2921981484ebb9165
|
[
"MIT"
] | null | null | null |
align_rudder/envs/utils.py
|
ml-jku/align-rudder
|
26cf4b62a713e180063cefc2921981484ebb9165
|
[
"MIT"
] | 1
|
2020-12-09T21:33:28.000Z
|
2020-12-09T21:33:28.000Z
|
import numpy as np
class Utils:
    """One-hot encoding helpers for environment observations."""

    @staticmethod
    def to_one_hot(a, len_):
        """Encode each entry of the 1-D array *a* as a one-hot row of width len_."""
        encoded = np.zeros((a.size, len_))
        encoded[np.arange(a.size), a] = 1
        return encoded

    @staticmethod
    def to_one_hot_obs(obs, len_):
        """One-hot encode a batch of pair observations, flattened to width 2 * len_."""
        rows = [Utils.to_one_hot(np.array(item), len_) for item in obs]
        return np.array(rows).reshape(-1, len_ * 2)

    @staticmethod
    def to_one_hot_flatten(a, len_):
        """One-hot encode each 2-D coordinate row of *a* into a row of width len_**2.

        NOTE(review): the flat index is el[0]*el[1] + el[1] rather than the
        usual el[0]*len_ + el[1]; preserved as-is -- worth confirming intent.
        """
        encoded = np.zeros((a.shape[0], len_ * len_))
        def to_idx(el):
            return el[0] * el[1] + el[1]
        idx = np.apply_along_axis(to_idx, axis=1, arr=a)
        encoded[np.arange(a.shape[0]), idx] = 1
        return encoded

    @staticmethod
    def to_one_hot_flatten_obs(obs, len_):
        """One-hot encode raw observations via to_one_hot_flatten."""
        return Utils.to_one_hot_flatten(np.array(obs), len_)
class UtilsRooms:
    """One-hot helpers for the rooms environment ((x, y, room) triples)."""

    @staticmethod
    def to_one_hot(a, len_, rooms):
        """Encode (a[0], a[1], a[2]) as one row of width 2 * len_ + rooms."""
        encoded = np.zeros((1, len_ * 2 + rooms))
        hot_positions = [a[0], a[1] + len_, a[2] + len_ * 2]
        encoded[0, hot_positions] = 1
        return encoded

    @staticmethod
    def to_one_hot_obs(obs, len_, rooms):
        """Wrap a single encoded observation in an extra batch dimension."""
        return np.array([UtilsRooms.to_one_hot(obs, len_, rooms)])

    @staticmethod
    def to_one_hot_flatten(a, len_):
        """Same flattened encoding as Utils.to_one_hot_flatten (kept for parity)."""
        encoded = np.zeros((a.shape[0], len_ * len_))
        def to_idx(el):
            return el[0] * el[1] + el[1]
        idx = np.apply_along_axis(to_idx, axis=1, arr=a)
        encoded[np.arange(a.shape[0]), idx] = 1
        return encoded

    @staticmethod
    def to_one_hot_flatten_obs(obs, len_):
        """Delegates to Utils.to_one_hot_flatten, matching the original wiring."""
        return Utils.to_one_hot_flatten(np.array(obs), len_)
| 26.089286
| 97
| 0.57974
| 239
| 1,461
| 3.280335
| 0.154812
| 0.076531
| 0.122449
| 0.204082
| 0.73852
| 0.73852
| 0.727041
| 0.658163
| 0.658163
| 0.658163
| 0
| 0.023697
| 0.277892
| 1,461
| 55
| 98
| 26.563636
| 0.719431
| 0
| 0
| 0.682927
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.243902
| false
| 0
| 0.02439
| 0.146341
| 0.560976
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
cbb2b26da682b7d50be415dbdc32907ea48f0164
| 8,021
|
py
|
Python
|
knrm.py
|
YuanyuanQi/CGTR
|
363d96842a23016aaad5254074b10ecea91bf475
|
[
"MIT"
] | null | null | null |
knrm.py
|
YuanyuanQi/CGTR
|
363d96842a23016aaad5254074b10ecea91bf475
|
[
"MIT"
] | null | null | null |
knrm.py
|
YuanyuanQi/CGTR
|
363d96842a23016aaad5254074b10ecea91bf475
|
[
"MIT"
] | null | null | null |
import os
'''os.environ["CUDA_VISIBLE_DEVICES"] = "1,0"
for item in [1,2,3,4,5]:
os.system("python train.py --model bert_knrm --datafiles data/robust/queries.tsv data/robust/documents.stem.sw.tsv --qrels data/robust/qrels --train_pairs data/robust/f{0}.train.pairs --valid_run data/robust/f{1}.valid.run --model_out_dir models/robust/cedr-knrm/base-clean-stem-sw/f{2}".format(item,item,item))
os.system("python seq_train.py --model bert_knrm --datafiles data/robust/queries.tsv data/robust/documents.tsv --qrels data/robust/qrels --train_pairs data/robust/f{0}.train.pairs.150new --valid_run data/robust/f{1}.valid.run.150new --model_out_dir models/robust/seq-vbert-cedr-knrm/width-27-1-res150/f{2}".format(item,item,item))
print ('DONE',item)#'''
'''os.environ["CUDA_VISIBLE_DEVICES"] = "0,1"
for item in [3]:
os.system("python train.py --model bert_knrm --datafiles data/robust/queries.tsv data/robust/documents.textrank.stp800.Maxlen1759.tsv --qrels data/robust/qrels --train_pairs data/robust/f{0}.train.pairs --valid_run data/robust/f{1}.valid.run --model_out_dir models/robust/cedr-knrm/textrank-stp800-doc512-lr0.001/f{2} --lr 0.001".format(item,item,item))
print ('DONE',item)
os.system("python train.py --model bert_knrm --datafiles data/robust/queries.tsv data/robust/documents.textrank.stp800.Maxlen1759.tsv --qrels data/robust/qrels --train_pairs data/robust/f{0}.train.pairs --valid_run data/robust/f{1}.valid.run --model_out_dir models/robust/cedr-knrm/textrank-stp800-doc512-lr0.0001/f{2} --lr 0.0001 --epoch 150".format(item,item,item))
print ('DONE',item)#'''
#document.textrank
'''# os.environ["CUDA_VISIBLE_DEVICES"] = "1,0"
# for item in [3]:
# os.system("python train.py --model cedr_conv_knrm --datafiles data/robust/queries.tsv data/robust/documents.textrank.stp1000.Maxlen1759.tsv --qrels data/robust/qrels --train_pairs data/robust/f{0}.train.pairs --valid_run data/robust/f{1}.valid.run --model_out_dir models/robust/cedr-conv-12kn-256hd-knrm-float16/textrank-stp1000-doc1000-lr0.00002/f{2} --lr 0.00002 --epoch 150".format(item,item,item))
# print ('DONE',item)
# os.system("python train.py --model cedr_conv_knrm --datafiles data/robust/queries.tsv data/robust/documents.textrank.stp1000.Maxlen1759.tsv --qrels data/robust/qrels --train_pairs data/robust/f{0}.train.pairs --valid_run data/robust/f{1}.valid.run --model_out_dir models/robust/cedr-conv-12kn-256hd-knrm-float16/textrank-stp1000-doc1000-lr0.00003/f{2} --lr 0.00003 --epoch 150".format(item,item,item))
# print ('DONE',item)
# os.system("python train.py --model cedr_conv_knrm --datafiles data/robust/queries.tsv data/robust/documents.textrank.stp1000.Maxlen1759.tsv --qrels data/robust/qrels --train_pairs data/robust/f{0}.train.pairs --valid_run data/robust/f{1}.valid.run --model_out_dir models/robust/cedr-conv-12kn-256hd-knrm-float16/textrank-stp1000-doc1000-lr0.00004/f{2} --lr 0.00004 --epoch 150".format(item,item,item))
# print ('DONE',item)
# os.system("python train.py --model cedr_conv_knrm --datafiles data/robust/queries.tsv data/robust/documents.textrank.stp1000.Maxlen1759.tsv --qrels data/robust/qrels --train_pairs data/robust/f{0}.train.pairs --valid_run data/robust/f{1}.valid.run --model_out_dir models/robust/cedr-conv-12kn-256hd-knrm-float16/textrank-stp1000-doc1000-lr0.00005/f{2} --lr 0.00005 --epoch 150".format(item,item,item))
# print ('DONE',item)#'''
#robust04 document
'''# os.environ["CUDA_VISIBLE_DEVICES"] = "0,1"
# for item in [3]:
# os.system("python train.py --model bert_knrm_bk --datafiles data/robust/queries.tsv data/robust/documents.tsv --qrels data/robust/qrels --train_pairs data/robust/f{0}.train.pairs --valid_run data/robust/f{1}.valid.run --model_out_dir models/robust/fine-tuning/bert-knrm-orig-float16/orig-doc1200-lr0.00005/f{2} --lr 0.00005 --epoch 150".format(item,item,item))
# print ('DONE',item)
# os.system("python train.py --model bert_knrm --datafiles data/robust/queries.tsv data/robust/documents.tsv --qrels data/robust/qrels --train_pairs data/robust/f{0}.train.pairs --valid_run data/robust/f{1}.valid.run --model_out_dir models/robust/cedr-knrm-float16/orig-doc800-lr0.00001/f{2} --lr 0.00001 --epoch 150".format(item,item,item))
# print ('DONE',item)
# os.system("python train.py --model cedr_conv_gcn_knrm --datafiles data/robust/queries.tsv data/robust/documents.tsv --qrels data/robust/qrels --train_pairs data/robust/f{0}.train.pairs --valid_run data/robust/f{1}.valid.run --model_out_dir models/robust/cedr-conv-gcn-knrm-float16/orig-doc1600-lr0.0001-lastlayer-bn/f{2} --lr 0.00001 --epoch 200".format(item,item,item))
# print ('DONE',item)
# os.system("python train.py --model bert_knrm --datafiles data/robust/queries.tsv data/robust/documents.tsv --qrels data/robust/qrels --train_pairs data/robust/f{0}.train.pairs --valid_run data/robust/f{1}.valid.run --model_out_dir models/robust/cedr-knrm-float16/orig-doc1200-lr0.001-13layers/f{2} --lr 0.001 --epoch 150".format(item,item,item))
# print ('DONE',item)
# os.system("python train.py --model bert_knrm --datafiles data/robust/queries.tsv data/robust/documents.tsv --qrels data/robust/qrels --train_pairs data/robust/f{0}.train.pairs --valid_run data/robust/f{1}.valid.run --model_out_dir models/robust/cedr-knrm-float16/orig-doc1200-lr0.00001-13layers/f{2} --lr 0.00001 --epoch 250".format(item,item,item))
# os.system("python train.py --model cedr_gan_sim --datafiles data/robust/queries.tsv data/robust/documents.tsv --qrels data/robust/qrels --train_pairs data/robust/f{0}.train.pairs --valid_run data/robust/f{1}.valid.run --model_out_dir models/robust/cedr-gan-sim-float16/orig-doc600-lr0.0001/f{2} --lr 0.0001 --epoch 150".format(item,item,item))
# print ('DONE',item)
# os.system("python train.py --model cedr_conv_gan_knrm --datafiles data/robust/queries.tsv data/robust/documents.tsv --qrels data/robust/qrels --train_pairs data/robust/f{0}.train.pairs --valid_run data/robust/f{1}.valid.run --model_out_dir models/robust/cedr-conv-gan-knrm-float16/orig-doc600-lr0.000001/f{2} --lr 0.000001 --epoch 150".format(item,item,item))
# print ('DONE',item)#'''
# os.system("python train.py --model cedr_conv_knrm --datafiles data/robust/queries.tsv data/robust/documents.tsv --qrels data/robust/qrels --train_pairs data/robust/f{0}.train.pairs --valid_run data/robust/f{1}.valid.run --model_out_dir models/robust/cedr-conv13-knrm-float16/orig-doc1200-lr0.00005/f{2} --lr 0.00005 --epoch 250".format(item,item,item))
# print ('DONE',item)
# os.system("python train.py --model cedr_conv_gcn_knrm --datafiles data/robust/queries.tsv data/robust/documents.tsv --qrels data/robust/qrels --train_pairs data/robust/f{0}.train.pairs --valid_run data/robust/f{1}.valid.run --model_out_dir models/robust/cedr-conv-gcn-knrm-float16-test/orig-doc1200-lr0.00005-lastlayer/f{2} --lr 0.00005 --epoch 250".format(item,item,item))
# print ('DONE',item)
#robust04
# os.environ["CUDA_VISIBLE_DEVICES"] = "1,0"
# for item in [3]:
# os.system("python train.py --model bert_knrm --datafiles data/robust/queries.tsv data/robust/documents.tsv --qrels data/robust/qrels --train_pairs data/robust/f{0}.train.pairs --valid_run data/robust/f{1}.valid.run --model_out_dir models/robust/fine-tuning/bert-knrm-float16/orig-doc1200-lr0.001-lastlayer-f1/f{2} --lr 0.001 --epoch 150".format(item,item,item))
# print ('DONE',item)
# Expose both GPUs to the child training process.
os.environ["CUDA_VISIBLE_DEVICES"] = "0,1"
# Fold number(s) of the robust04 cross-validation split to train on.
for item in [3]:
    # Launch one fine-tuning run of the BERT-KNRM ranker on fold `item`.
    # NOTE(review): os.system builds a shell command via str.format; arguments
    # here are literals, but prefer subprocess.run([...], shell=False) if any
    # path ever becomes user-supplied.
    os.system("python train.py --model bert_knrm --datafiles data/robust/queries.tsv data/robust/documents.tsv --qrels data/robust/qrels --train_pairs data/robust/f{0}.train.pairs --valid_run data/robust/f{1}.valid.run --model_out_dir models/robust/fine-tuning/bert-knrm-float16/orig-doc1200-lr0.01-lastlayer-f1/f{2} --lr 0.01 --epoch 150".format(item,item,item))
    print ('DONE',item)
| 104.168831
| 412
| 0.734322
| 1,333
| 8,021
| 4.327832
| 0.070518
| 0.164673
| 0.072456
| 0.08563
| 0.943491
| 0.934651
| 0.920784
| 0.91281
| 0.899289
| 0.892529
| 0
| 0.070664
| 0.091385
| 8,021
| 76
| 413
| 105.539474
| 0.720911
| 0.158584
| 0
| 0
| 0
| 0.2
| 0.719758
| 0.368952
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0.2
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
cbb8dc80c2308a98f5d5197aadea80711edf32b6
| 338
|
py
|
Python
|
linear_algebra_by_j_borwnlee/ch_07/vector_arithmetic.py
|
pavelexpertov/scribbles
|
50ebcd6a686fd32be20d401563db7cc87781a428
|
[
"MIT"
] | null | null | null |
linear_algebra_by_j_borwnlee/ch_07/vector_arithmetic.py
|
pavelexpertov/scribbles
|
50ebcd6a686fd32be20d401563db7cc87781a428
|
[
"MIT"
] | null | null | null |
linear_algebra_by_j_borwnlee/ch_07/vector_arithmetic.py
|
pavelexpertov/scribbles
|
50ebcd6a686fd32be20d401563db7cc87781a428
|
[
"MIT"
] | null | null | null |
def add(l1, l2):
    """Return the element-wise sum of two equal-length vectors."""
    return [left + right for left, right in zip(l1, l2)]
def minus(l1, l2):
    """Return the element-wise difference l1 - l2."""
    return [left - right for left, right in zip(l1, l2)]
def multiply(l1, l2):
    """Return the element-wise (Hadamard) product of two vectors."""
    return [left * right for left, right in zip(l1, l2)]
def divide(l1, l2):
    """Return the element-wise quotient l1 / l2 (true division).

    Raises ZeroDivisionError if any element of l2 is zero.
    """
    return [left / right for left, right in zip(l1, l2)]
def dot_product(l1, l2):
    """Return the dot product of two equal-length vectors."""
    # Sum of element-wise products; identical to multiply(l1, l2) then sum().
    return sum(left * right for left, right in zip(l1, l2))
| 18.777778
| 42
| 0.571006
| 68
| 338
| 2.823529
| 0.25
| 0.208333
| 0.260417
| 0.229167
| 0.604167
| 0.604167
| 0.604167
| 0.604167
| 0.604167
| 0.604167
| 0
| 0.080972
| 0.269231
| 338
| 17
| 43
| 19.882353
| 0.696356
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.454545
| false
| 0
| 0
| 0.363636
| 0.909091
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
1dff73c3014c6d29cac64be101db79118171969a
| 915
|
py
|
Python
|
colour_hdri/tonemapping/global_operators/__init__.py
|
colour-science/colour-hdri
|
3a97c4ad8bc328e2fffabf84ac8b56d795dbeb82
|
[
"BSD-3-Clause"
] | 92
|
2015-09-19T22:11:15.000Z
|
2022-03-13T06:37:53.000Z
|
colour_hdri/tonemapping/global_operators/__init__.py
|
colour-science/colour-hdri
|
3a97c4ad8bc328e2fffabf84ac8b56d795dbeb82
|
[
"BSD-3-Clause"
] | 24
|
2017-05-25T08:55:10.000Z
|
2022-03-30T18:26:43.000Z
|
colour_hdri/tonemapping/global_operators/__init__.py
|
colour-science/colour-hdri
|
3a97c4ad8bc328e2fffabf84ac8b56d795dbeb82
|
[
"BSD-3-Clause"
] | 9
|
2016-01-18T17:29:51.000Z
|
2020-11-12T12:54:18.000Z
|
# -*- coding: utf-8 -*-
from .operators import (
tonemapping_operator_simple, tonemapping_operator_normalisation,
tonemapping_operator_gamma, tonemapping_operator_logarithmic,
tonemapping_operator_exponential, tonemapping_operator_logarithmic_mapping,
tonemapping_operator_exponentiation_mapping,
tonemapping_operator_Schlick1994, tonemapping_operator_Tumblin1999,
tonemapping_operator_Reinhard2004, tonemapping_operator_filmic)
# Public API of this subpackage: the global tonemapping operators re-exported
# from the relative ".operators" module above. Keep this list in sync with
# that import.
__all__ = [
    'tonemapping_operator_simple',
    'tonemapping_operator_normalisation',
    'tonemapping_operator_gamma',
    'tonemapping_operator_logarithmic',
    'tonemapping_operator_exponential',
    'tonemapping_operator_logarithmic_mapping',
    'tonemapping_operator_exponentiation_mapping',
    'tonemapping_operator_Schlick1994',
    'tonemapping_operator_Tumblin1999',
    'tonemapping_operator_Reinhard2004',
    'tonemapping_operator_filmic',
]
| 38.125
| 79
| 0.821858
| 77
| 915
| 9.090909
| 0.25974
| 0.597143
| 0.171429
| 0.102857
| 0.954286
| 0.954286
| 0.954286
| 0.954286
| 0.954286
| 0.954286
| 0
| 0.030864
| 0.114754
| 915
| 23
| 80
| 39.782609
| 0.833333
| 0.022951
| 0
| 0
| 0
| 0
| 0.401345
| 0.401345
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.05
| 0
| 0.05
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3804a5675d3a303137a1b9189e165facab2709ca
| 34,388
|
py
|
Python
|
genomics_data_index/test/integration/storage/io/mutation/test_VariationFile.py
|
apetkau/genomics-data-index
|
d0cc119fd57b8cbd701affb1c84450cf7832fa01
|
[
"Apache-2.0"
] | 12
|
2021-05-03T20:56:05.000Z
|
2022-01-04T14:52:19.000Z
|
genomics_data_index/test/integration/storage/io/mutation/test_VariationFile.py
|
apetkau/genomics-data-index
|
d0cc119fd57b8cbd701affb1c84450cf7832fa01
|
[
"Apache-2.0"
] | 30
|
2021-04-26T23:03:40.000Z
|
2022-02-25T18:41:14.000Z
|
genomics_data_index/test/integration/storage/io/mutation/test_VariationFile.py
|
apetkau/genomics-data-index
|
d0cc119fd57b8cbd701affb1c84450cf7832fa01
|
[
"Apache-2.0"
] | null | null | null |
import tempfile
from pathlib import Path
import pytest
from genomics_data_index.storage.MaskedGenomicRegions import MaskedGenomicRegions
from genomics_data_index.storage.io.mutation.SequenceFile import SequenceFile
from genomics_data_index.storage.io.mutation.VariationFile import VariationFile
from genomics_data_index.storage.io.mutation.VcfSnpEffAnnotationParser import VcfSnpEffAnnotationParser
from genomics_data_index.test.integration import data_dir, regular_vcf_dir, variation_dir, reference_file, consensus_dir
from genomics_data_index.test.integration import extra_snippy_dir
from genomics_data_index.test.integration import reference_file_5000_snpeff, snpeff_vcf_file
from genomics_data_index.test.integration import snpeff_sample_vcfs, snpeff_sarscov2_vcfs
from genomics_data_index.test.integration.storage.io import read_vcf_df
@pytest.fixture
def snpeff_parser() -> VcfSnpEffAnnotationParser:
    # Fresh parser instance per test; no state shared between tests.
    return VcfSnpEffAnnotationParser()
def test_write():
    """Writing SampleA's VCF produces an indexed .vcf.gz with typed records."""
    source_vcf = data_dir / 'SampleA' / 'snps.vcf.gz'
    with tempfile.TemporaryDirectory() as tmp:
        destination = Path(tmp) / 'out.vcf.gz'
        assert not destination.exists()
        written, written_index = VariationFile(source_vcf).write(destination)
        # The writer must report the requested path plus a CSI index beside it.
        assert destination.exists()
        assert written == destination
        assert written_index.exists()
        assert str(written) + '.csi' == str(written_index)
        frame = read_vcf_df(destination)
        expected_types = {293: 'SNP', 302: 'INDEL', 324: 'INDEL',
                          374: 'INDEL', 461: 'OTHER', 506: 'SNP'}
        for position, variant_type in expected_types.items():
            assert variant_type == frame[frame['POS'] == position]['TYPE'].tolist()[0]
def test_write_2():
    """Same write round-trip as test_write, but for SampleC's variants."""
    source_vcf = data_dir / 'SampleC' / 'snps.vcf.gz'
    with tempfile.TemporaryDirectory() as tmp:
        destination = Path(tmp) / 'out.vcf.gz'
        assert not destination.exists()
        written, written_index = VariationFile(source_vcf).write(destination)
        assert destination.exists()
        assert written == destination
        assert written_index.exists()
        assert str(written) + '.csi' == str(written_index)
        frame = read_vcf_df(destination)
        for position, variant_type in {347: 'INDEL', 619: 'SNP', 1984: 'OTHER'}.items():
            assert variant_type == frame[frame['POS'] == position]['TYPE'].tolist()[0]
def test_write_missing_type_tag():
    """A VCF without TYPE tags still comes out with types filled in."""
    source_vcf = regular_vcf_dir / 'SampleA.vcf.gz'
    with tempfile.TemporaryDirectory() as tmp:
        destination = Path(tmp) / 'out.vcf.gz'
        assert not destination.exists()
        VariationFile(source_vcf).write(destination)
        assert destination.exists()
        frame = read_vcf_df(destination)
        expected_types = {293: 'SNP', 302: 'INDEL', 324: 'INDEL',
                          374: 'INDEL', 461: 'OTHER', 506: 'SNP'}
        for position, variant_type in expected_types.items():
            assert variant_type == frame[frame['POS'] == position]['TYPE'].tolist()[0]
def test_write_2_missing_type_tag():
    """SampleC's tag-less VCF also gains TYPE values on write."""
    source_vcf = regular_vcf_dir / 'SampleC.vcf.gz'
    with tempfile.TemporaryDirectory() as tmp:
        destination = Path(tmp) / 'out.vcf.gz'
        assert not destination.exists()
        VariationFile(source_vcf).write(destination)
        assert destination.exists()
        frame = read_vcf_df(destination)
        for position, variant_type in {347: 'INDEL', 619: 'SNP', 1984: 'OTHER'}.items():
            assert variant_type == frame[frame['POS'] == position]['TYPE'].tolist()[0]
def test_write_bcf():
    """Writing to a .bcf destination creates the BCF output file."""
    source_vcf = data_dir / 'SampleA' / 'snps.vcf.gz'
    with tempfile.TemporaryDirectory() as tmp:
        destination = Path(tmp) / 'out.bcf'
        assert not destination.exists()
        VariationFile(source_vcf).write(destination)
        assert destination.exists()
def test_consensus_no_mask():
    """Consensus without a mask matches the pre-computed expected FASTA."""
    sample_bcf = variation_dir / 'SampleA.bcf'
    expected_path = consensus_dir / 'SampleA-consensus-nomask.fasta.gz'
    _, expected_records = SequenceFile(expected_path).parse_sequence_file()
    expected = expected_records[0]
    records = VariationFile(sample_bcf).consensus(reference_file=reference_file)
    assert 1 == len(records)
    actual = records[0]
    assert 5180 == len(actual)
    assert expected.id == actual.id
    assert expected.seq == actual.seq
def test_consensus_empty_mask():
    """Supplying an empty mask file must not change the consensus sequence."""
    sample_bcf = variation_dir / 'SampleA.bcf'
    empty_mask = MaskedGenomicRegions.empty_mask()
    expected_path = consensus_dir / 'SampleA-consensus-nomask.fasta.gz'
    _, expected_records = SequenceFile(expected_path).parse_sequence_file()
    expected = expected_records[0]
    with tempfile.TemporaryDirectory() as tmp:
        mask_file = Path(tmp) / 'mask.bed.gz'
        empty_mask.write(mask_file)
        records = VariationFile(sample_bcf).consensus(reference_file=reference_file,
                                                      mask_file=mask_file)
        assert 1 == len(records)
        actual = records[0]
        assert 5180 == len(actual)
        assert expected.id == actual.id
        assert expected.seq == actual.seq
def test_consensus_mask():
    """Consensus with SampleA's mask matches the with-mask expected FASTA."""
    sample_bcf = variation_dir / 'SampleA.bcf'
    mask_path = variation_dir / 'SampleA.bed.gz'
    expected_path = consensus_dir / 'SampleA-consensus-withmask.fasta.gz'
    _, expected_records = SequenceFile(expected_path).parse_sequence_file()
    expected = expected_records[0]
    records = VariationFile(sample_bcf).consensus(reference_file=reference_file,
                                                  mask_file=mask_path)
    assert 1 == len(records)
    actual = records[0]
    assert 5180 == len(actual)
    assert expected.id == actual.id
    assert expected.seq == actual.seq
def test_consensus_mask_over_mutation():
    """A mask overlapping a mutation site yields the dedicated expected FASTA."""
    sample_bcf = variation_dir / 'SampleA.bcf'
    mask_path = variation_dir / 'SampleA-mask-over-mutation.bed.gz'
    expected_path = consensus_dir / 'SampleA-consensus-withmask-over-mutation.fasta.gz'
    _, expected_records = SequenceFile(expected_path).parse_sequence_file()
    expected = expected_records[0]
    records = VariationFile(sample_bcf).consensus(reference_file=reference_file,
                                                  mask_file=mask_path)
    assert 1 == len(records)
    actual = records[0]
    assert 5180 == len(actual)
    assert expected.id == actual.id
    assert expected.seq == actual.seq
def test_union_all_files():
    """Union of three samples restricted to SNPs has the expected per-site counts."""
    # Explicit list to guarantee a specific order.
    variant_files = [
        data_dir / 'SampleA' / 'snps.vcf.gz',
        data_dir / 'SampleB' / 'snps.vcf.gz',
        data_dir / 'SampleC' / 'snps.vcf.gz'
    ]
    union_df = VariationFile.union_all_files(variant_files, include_expression='TYPE="SNP"')
    assert 60 == len(union_df)
    assert ['ID', 'CHROM', 'POS', 'REF', 'ALT', 'COUNT'] == union_df.columns.tolist()
    for position, count in {190: 1, 5061: 1, 4975: 2, 2076: 1}.items():
        selector = (union_df['CHROM'] == 'reference') & (union_df['POS'] == position)
        assert count == union_df[selector]['COUNT'].values[0]
def test_union_one_file():
    """Union over a single BCF degenerates to that sample's SNPs with count 1."""
    sample_bcf = variation_dir / 'SampleA.bcf'
    union_df = VariationFile.union_all_files([sample_bcf], include_expression='TYPE="SNP"')
    assert 26 == len(union_df)
    assert ['ID', 'CHROM', 'POS', 'REF', 'ALT', 'COUNT'] == union_df.columns.tolist()
    for position in (293, 4929):
        selector = (union_df['CHROM'] == 'reference') & (union_df['POS'] == position)
        assert 1 == union_df[selector]['COUNT'].values[0]
def test_union_batch_size_1():
    """batch_size=1 must give the same SNP union as the default batching."""
    # Explicit list to guarantee a specific order.
    variant_files = [
        data_dir / 'SampleA' / 'snps.vcf.gz',
        data_dir / 'SampleB' / 'snps.vcf.gz',
        data_dir / 'SampleC' / 'snps.vcf.gz'
    ]
    union_df = VariationFile.union_all_files(variant_files, include_expression='TYPE="SNP"', batch_size=1)
    assert 60 == len(union_df)
    assert ['ID', 'CHROM', 'POS', 'REF', 'ALT', 'COUNT'] == union_df.columns.tolist()
    for position, count in {190: 1, 5061: 1, 4975: 2, 2076: 1}.items():
        selector = (union_df['CHROM'] == 'reference') & (union_df['POS'] == position)
        assert count == union_df[selector]['COUNT'].values[0]
def test_union_batch_size_2_all_data():
    """Unfiltered union (all variant types) with batch_size=2 has 112 rows."""
    # Explicit list to guarantee a specific order.
    variant_files = [
        data_dir / 'SampleA' / 'snps.vcf.gz',
        data_dir / 'SampleB' / 'snps.vcf.gz',
        data_dir / 'SampleC' / 'snps.vcf.gz'
    ]
    union_df = VariationFile.union_all_files(variant_files, batch_size=2)
    assert 112 == len(union_df)
    assert ['ID', 'CHROM', 'POS', 'REF', 'ALT', 'COUNT'] == union_df.columns.tolist()
    for position, count in {190: 1, 5061: 1, 4975: 2, 2076: 1}.items():
        selector = (union_df['CHROM'] == 'reference') & (union_df['POS'] == position)
        assert count == union_df[selector]['COUNT'].values[0]
def test_union_many_files_batch_size_2_more_data():
    """Union across eight samples (batch_size=2) aggregates allele counts by ID."""
    # Explicit list to guarantee a specific order.
    variant_files = [
        data_dir / 'SampleA' / 'snps.vcf.gz',
        data_dir / 'SampleB' / 'snps.vcf.gz',
        data_dir / 'SampleC' / 'snps.vcf.gz',
        extra_snippy_dir / 'SampleB2.snps.fill-tags.vcf.gz',
        extra_snippy_dir / 'SampleB3.snps.fill-tags.vcf.gz',
        extra_snippy_dir / 'SampleB4.snps.fill-tags.vcf.gz',
        extra_snippy_dir / 'SampleB5.snps.fill-tags.vcf.gz',
        extra_snippy_dir / 'SampleB5-different-allele.fill-tags.vcf.gz',
    ]
    union_df = VariationFile.union_all_files(variant_files, batch_size=2)
    assert 115 == len(union_df)
    assert ['ID', 'CHROM', 'POS', 'REF', 'ALT', 'COUNT'] == union_df.columns.tolist()
    expected_counts = {
        'reference:190:A:G': 6,
        'reference:5061:G:A': 6,
        'reference:4975:T:C': 6,
        'reference:4975:T:CAT': 1,
        'reference:2076:A:T': 1,
        'reference:1483:AAAGAGGGGCTGCTGGAGCCG:A': 2,
        'reference:1640:C:A': 3,
        'reference:4693:C:CGA': 6,
        'reference:4693:C:G': 1,
        'reference:883:CACATG:C': 3,
        'reference:349:AAGT:A': 5,
        'reference:349:AAGT:T': 1,
    }
    for variant_id, count in expected_counts.items():
        assert count == union_df[union_df['ID'] == variant_id]['COUNT'].values[0]
def test_union_many_files_batch_size_2_with_empty_vcf():
    """An empty VCF in the input set contributes nothing to the union counts."""
    # Explicit list to guarantee a specific order.
    variant_files = [
        data_dir / 'SampleA' / 'snps.vcf.gz',
        data_dir / 'SampleB' / 'snps.vcf.gz',
        data_dir / 'SampleC' / 'snps.vcf.gz',
        extra_snippy_dir / 'SampleB2.snps.fill-tags.vcf.gz',
        extra_snippy_dir / 'SampleB3.snps.fill-tags.vcf.gz',
        extra_snippy_dir / 'SampleB4.snps.fill-tags.vcf.gz',
        extra_snippy_dir / 'SampleB5.snps.fill-tags.vcf.gz',
        extra_snippy_dir / 'SampleB5-different-allele.fill-tags.vcf.gz',
        extra_snippy_dir / 'SampleB-empty.snps.fill-tags.vcf.gz'
    ]
    union_df = VariationFile.union_all_files(variant_files, batch_size=2)
    print(union_df)
    assert 115 == len(union_df)
    assert ['ID', 'CHROM', 'POS', 'REF', 'ALT', 'COUNT'] == union_df.columns.tolist()
    expected_counts = {
        'reference:190:A:G': 6,
        'reference:5061:G:A': 6,
        'reference:4975:T:C': 6,
        'reference:4975:T:CAT': 1,
        'reference:2076:A:T': 1,
        'reference:1483:AAAGAGGGGCTGCTGGAGCCG:A': 2,
        'reference:1640:C:A': 3,
        'reference:4693:C:CGA': 6,
        'reference:4693:C:G': 1,
        'reference:883:CACATG:C': 3,
        'reference:349:AAGT:A': 5,
        'reference:349:AAGT:T': 1,
    }
    for variant_id, count in expected_counts.items():
        assert count == union_df[union_df['ID'] == variant_id]['COUNT'].values[0]
def test_union_many_files_batch_size_odd_cores_3():
    """Counts are unchanged when files don't divide evenly over cores/batches."""
    # Explicit list to guarantee a specific order.
    variant_files = [
        data_dir / 'SampleA' / 'snps.vcf.gz',
        data_dir / 'SampleB' / 'snps.vcf.gz',
        data_dir / 'SampleC' / 'snps.vcf.gz',
        extra_snippy_dir / 'SampleB2.snps.fill-tags.vcf.gz',
        extra_snippy_dir / 'SampleB3.snps.fill-tags.vcf.gz',
        extra_snippy_dir / 'SampleB4.snps.fill-tags.vcf.gz',
        extra_snippy_dir / 'SampleB5.snps.fill-tags.vcf.gz',
        extra_snippy_dir / 'SampleB5-different-allele.fill-tags.vcf.gz',
        extra_snippy_dir / 'SampleB-empty.snps.fill-tags.vcf.gz'
    ]
    union_df = VariationFile.union_all_files(variant_files, ncores=3, batch_size=3)
    print(union_df)
    assert 115 == len(union_df)
    assert ['ID', 'CHROM', 'POS', 'REF', 'ALT', 'COUNT'] == union_df.columns.tolist()
    expected_counts = {
        'reference:190:A:G': 6,
        'reference:5061:G:A': 6,
        'reference:4975:T:C': 6,
        'reference:4975:T:CAT': 1,
        'reference:2076:A:T': 1,
        'reference:1483:AAAGAGGGGCTGCTGGAGCCG:A': 2,
        'reference:1640:C:A': 3,
        'reference:4693:C:CGA': 6,
        'reference:4693:C:G': 1,
        'reference:883:CACATG:C': 3,
        'reference:349:AAGT:A': 5,
        'reference:349:AAGT:T': 1,
    }
    for variant_id, count in expected_counts.items():
        assert count == union_df[union_df['ID'] == variant_id]['COUNT'].values[0]
def test_union_many_files_ambiguous():
    """Ambiguous (N-containing) alleles are counted as distinct variant IDs."""
    # Explicit list to guarantee a specific order.
    variant_files = [
        data_dir / 'SampleA' / 'snps.vcf.gz',
        data_dir / 'SampleB' / 'snps.vcf.gz',
        data_dir / 'SampleC' / 'snps.vcf.gz',
        extra_snippy_dir / 'SampleB2.snps.fill-tags.vcf.gz',
        extra_snippy_dir / 'SampleB3.snps.fill-tags.vcf.gz',
        extra_snippy_dir / 'SampleB4.snps.fill-tags.vcf.gz',
        extra_snippy_dir / 'SampleB5.snps.fill-tags.vcf.gz',
        extra_snippy_dir / 'SampleB5-different-allele.fill-tags.vcf.gz',
        extra_snippy_dir / 'SampleB5-different-allele-ambiguous.vcf.gz',
    ]
    union_df = VariationFile.union_all_files(variant_files)
    print(union_df)
    assert 119 == len(union_df)
    assert ['ID', 'CHROM', 'POS', 'REF', 'ALT', 'COUNT'] == union_df.columns.tolist()
    expected_counts = {
        'reference:190:A:G': 6,
        'reference:190:A:N': 1,
        'reference:5061:G:A': 7,
        'reference:1483:AAAGAGGGGCTGCTGGAGCCG:A': 2,
        'reference:349:AAGT:A': 5,
        'reference:349:AAGT:T': 1,
        'reference:349:ANGT:T': 1,
        'reference:4693:C:CGA': 6,
        'reference:4693:C:G': 1,
        'reference:4693:N:G': 1,
        'reference:4975:T:C': 6,
        'reference:4975:T:CAT': 1,
        'reference:4975:T:CNT': 1,
    }
    for variant_id, count in expected_counts.items():
        assert count == union_df[union_df['ID'] == variant_id]['COUNT'].values[0]
def test_union_many_files_batch_size_2_single_empty_vcf():
    """Union over a single empty VCF yields an empty frame with standard columns."""
    # Explicit list to guarantee a specific order.
    variant_files = [
        extra_snippy_dir / 'SampleB-empty.snps.fill-tags.vcf.gz'
    ]
    union_df = VariationFile.union_all_files(variant_files, batch_size=2)
    print(union_df)
    assert 0 == len(union_df)
    assert ['ID', 'CHROM', 'POS', 'REF', 'ALT', 'COUNT'] == union_df.columns.tolist()
def test_read_features(snpeff_parser):
    """read_features on SampleA yields 46 rows tagged with file and sample name."""
    vcf_file = data_dir / 'SampleA' / 'snps.vcf.gz'
    df = VariationFile(vcf_file).read_features('SampleA', snpeff_parser=snpeff_parser)
    # Typo fix in assertion message: 'Data fram' -> 'Data frame'.
    assert 46 == len(df), 'Data frame has incorrect length'
    assert {'snps.vcf.gz'} == set(df['FILE'].tolist()), 'Incorrect filename'
    assert {'SampleA'} == set(df['SAMPLE'].tolist()), 'Incorrect sample name'
    # Spot-check REF/ALT at representative positions (SNPs and indels).
    expected_ref_alt = {
        461: ('AAAT', 'G'),
        1048: ('C', 'G'),
        1253: ('T', 'TAA'),
        3656: ('CATT', 'C'),
    }
    for position, (ref, alt) in expected_ref_alt.items():
        v = df[df['POS'] == position]
        assert ref == v['REF'].values[0], 'Incorrect reference'
        assert alt == v['ALT'].values[0], 'Incorrect alt'
def test_read_features_snpeff(snpeff_parser):
    """read_features exposes snpEff ANN fields as ANN.* columns.

    Spot-checks a snp, a deletion, an insertion, a complex variant and an
    intergenic variant across three samples. The expected column list was
    previously duplicated three times; it is now defined once.
    """
    expected_columns = ['SAMPLE', 'CHROM', 'POS', 'REF', 'ALT', 'TYPE', 'FILE', 'VARIANT_ID',
                        'ANN.Allele', 'ANN.Annotation', 'ANN.Annotation_Impact', 'ANN.Gene_Name', 'ANN.Gene_ID',
                        'ANN.Feature_Type', 'ANN.Transcript_BioType', 'ANN.HGVS.c', 'ANN.HGVS.p']

    sample_10_014 = VariationFile(
        snpeff_sample_vcfs['SH10-014']).read_features('SH10-014', snpeff_parser=snpeff_parser).sort_values('POS')
    sample_14_001 = VariationFile(
        snpeff_sample_vcfs['SH14-001']).read_features('SH14-001', snpeff_parser=snpeff_parser).sort_values('POS')
    sample_14_014 = VariationFile(
        snpeff_sample_vcfs['SH14-014']).read_features('SH14-014', snpeff_parser=snpeff_parser).sort_values('POS')

    assert 139 == len(sample_10_014)
    assert expected_columns == list(sample_10_014.columns)

    # snv/snp
    sample_10_014_varA = sample_10_014[sample_10_014['POS'] == 140658]
    assert 1 == len(sample_10_014_varA)
    assert ['SH10-014', 'NC_011083', 140658, 'C', 'A', 'snp', 'SH10-014.vcf.gz', 'NC_011083:140658:C:A',
            'A', 'missense_variant', 'MODERATE', 'murF', 'SEHA_RS01180', 'transcript', 'protein_coding',
            'c.497C>A', 'p.Ala166Glu'] == sample_10_014_varA[
        sample_10_014_varA['ANN.Annotation'] == 'missense_variant'].iloc[0].tolist()

    # del
    sample_10_014_varB = sample_10_014[sample_10_014['POS'] == 1125996]
    assert 1 == len(sample_10_014_varB)
    assert ['SH10-014', 'NC_011083', 1125996, 'CG', 'C', 'del', 'SH10-014.vcf.gz', 'NC_011083:1125996:CG:C',
            'C', 'frameshift_variant', 'HIGH', 'SEHA_RS05995', 'SEHA_RS05995', 'transcript', 'protein_coding',
            'c.418delG', 'p.Glu140fs'] == sample_10_014_varB[
        sample_10_014_varB['ANN.Annotation'] == 'frameshift_variant'].iloc[0].tolist()

    # ins
    sample_10_014_varC = sample_10_014[sample_10_014['POS'] == 1246085]
    assert 1 == len(sample_10_014_varC)
    assert ['SH10-014', 'NC_011083', 1246085, 'C', 'CG', 'ins', 'SH10-014.vcf.gz', 'NC_011083:1246085:C:CG',
            'CG', 'frameshift_variant', 'HIGH', 'mdtG', 'SEHA_RS06605', 'transcript', 'protein_coding',
            'c.722dupC', 'p.Leu242fs'] == sample_10_014_varC[
        sample_10_014_varC['ANN.Annotation'] == 'frameshift_variant'].iloc[0].tolist()

    # complex
    sample_10_014_varD = sample_10_014[sample_10_014['POS'] == 3535121]
    assert 1 == len(sample_10_014_varD)
    assert ['SH10-014', 'NC_011083', 3535121, 'CGCGA', 'TGTGG', 'complex', 'SH10-014.vcf.gz',
            'NC_011083:3535121:CGCGA:TGTGG',
            'TGTGG', 'missense_variant', 'MODERATE', 'oadA', 'SEHA_RS17780', 'transcript', 'protein_coding',
            'c.1119_1123delTCGCGinsCCACA', 'p.ArgAla374HisThr'] == sample_10_014_varD[
        sample_10_014_varD['ANN.Annotation'] == 'missense_variant'].iloc[0].tolist()

    assert 115 == len(sample_14_001)
    assert expected_columns == list(sample_14_001.columns)

    sample_14_001_var = sample_14_001[sample_14_001['POS'] == 140658]
    assert 1 == len(sample_14_001_var)
    assert ['SH14-001', 'NC_011083', 140658, 'C', 'A', 'snp', 'SH14-001.vcf.gz', 'NC_011083:140658:C:A',
            'A', 'missense_variant', 'MODERATE', 'murF', 'SEHA_RS01180', 'transcript', 'protein_coding',
            'c.497C>A', 'p.Ala166Glu'] == sample_14_001_var[
        sample_14_001_var['ANN.Annotation'] == 'missense_variant'].iloc[0].tolist()

    assert 107 == len(sample_14_014)
    assert expected_columns == list(sample_14_014.columns)

    sample_14_014_var = sample_14_014[sample_14_014['POS'] == 298472]
    assert 1 == len(sample_14_014_var)
    # Intergenic variant: Transcript_BioType and HGVS.p are absent, so drop them
    # from the value comparison and verify they are NA separately.
    assert ['SH14-014', 'NC_011083', 298472, 'A', 'C', 'snp', 'SH14-014.vcf.gz', 'NC_011083:298472:A:C',
            'C', 'intergenic_region', 'MODIFIER', 'SEHA_RS01880-SEHA_RS01885', 'SEHA_RS01880-SEHA_RS01885',
            'intergenic_region', 'n.298472A>C'] == sample_14_014_var[
        sample_14_014_var['ANN.Annotation'] == 'intergenic_region'].drop(
        ['ANN.Transcript_BioType', 'ANN.HGVS.p'], axis='columns').iloc[0].tolist()
    assert {True} == set(sample_14_014_var[sample_14_014_var['ANN.Annotation'] == 'intergenic_region']
                         [['ANN.Transcript_BioType', 'ANN.HGVS.p']].iloc[0].isna().tolist())
def test_read_features_snpeff_sars_cov_2(snpeff_parser):
    """read_features parses snpEff ANN entries from a SARS-CoV-2 VCF.

    Six near-identical assert stanzas are collapsed into a POS -> expected-row
    table. ``fillna('<NA>')`` is applied uniformly; it is the identity for rows
    without missing values, and renders the intergenic row's missing
    Transcript_BioType / HGVS.p as the '<NA>' placeholder.
    """
    sample_df = VariationFile(
        snpeff_sarscov2_vcfs['USA/CA-CDPH-3000143037/2021']
    ).read_features('USA/CA-CDPH-3000143037/2021', snpeff_parser=snpeff_parser).sort_values('POS')

    assert ['SAMPLE', 'CHROM', 'POS', 'REF', 'ALT', 'TYPE', 'FILE', 'VARIANT_ID',
            'ANN.Allele', 'ANN.Annotation', 'ANN.Annotation_Impact', 'ANN.Gene_Name', 'ANN.Gene_ID',
            'ANN.Feature_Type', 'ANN.Transcript_BioType', 'ANN.HGVS.c', 'ANN.HGVS.p'] == list(sample_df.columns)
    assert 31 == len(sample_df)

    sample_df['ANN.Annotation'] = sample_df['ANN.Annotation'].astype(str)
    sample_df['ANN.Annotation_Impact'] = sample_df['ANN.Annotation_Impact'].astype(str)

    sample_name = 'USA/CA-CDPH-3000143037/2021'
    vcf_name = 'USA__CA-CDPH-3000143037__2021.vcf.gz'
    expected_rows = {
        # ORF1ab (ORF1a region)
        3948: [sample_name, 'NC_045512.2', 3948, 'A', 'G', 'SNP', vcf_name,
               'NC_045512.2:3948:A:G', 'G', 'missense_variant', 'MODERATE', 'ORF1ab', 'GU280_gp01',
               'transcript', 'protein_coding', 'c.3683A>G', 'p.D1228G'],
        # ORF1ab (ORF1a region)
        3037: [sample_name, 'NC_045512.2', 3037, 'C', 'T', 'SNP', vcf_name,
               'NC_045512.2:3037:C:T', 'T', 'synonymous_variant', 'LOW', 'ORF1ab', 'GU280_gp01',
               'transcript', 'protein_coding', 'c.2772C>T', 'p.F924F'],
        # ORF1ab (ORF1b region)
        19220: [sample_name, 'NC_045512.2', 19220, 'C', 'T', 'SNP', vcf_name,
                'NC_045512.2:19220:C:T', 'T', 'missense_variant', 'MODERATE', 'ORF1ab', 'GU280_gp01',
                'transcript', 'protein_coding', 'c.18956C>T', 'p.A6319V'],
        # S
        22917: [sample_name, 'NC_045512.2', 22917, 'T', 'G', 'SNP', vcf_name,
                'NC_045512.2:22917:T:G', 'G', 'missense_variant', 'MODERATE', 'S', 'GU280_gp02',
                'transcript', 'protein_coding', 'c.1355T>G', 'p.L452R'],
        # ORF7b
        27874: [sample_name, 'NC_045512.2', 27874, 'C', 'T', 'SNP', vcf_name,
                'NC_045512.2:27874:C:T', 'T', 'missense_variant', 'MODERATE', 'ORF7b', 'GU280_gp08',
                'transcript', 'protein_coding', 'c.119C>T', 'p.T40I'],
        # intergenic
        210: [sample_name, 'NC_045512.2', 210, 'G', 'T', 'SNP', vcf_name,
              'NC_045512.2:210:G:T', 'T', 'intergenic_region', 'MODIFIER', 'CHR_START-ORF1ab',
              'CHR_START-GU280_gp01', 'intergenic_region', '<NA>', 'n.210G>T', '<NA>'],
    }
    for pos, expected in expected_rows.items():
        variant = sample_df[sample_df['POS'] == pos].fillna('<NA>')
        assert 1 == len(variant)
        assert expected == variant.iloc[0].tolist()
def test_read_features_sars_cov_2_no_snpeff_annotation(snpeff_parser):
    """Without snpEff annotations the ANN.* columns still exist but are all NA.

    Six copy-pasted stanzas differing only in (pos, ref, alt) are collapsed
    into a single data-driven loop.
    """
    sample_df = VariationFile(
        snpeff_sarscov2_vcfs['USA/CA-CDPH-3000143037/2021.noann']
    ).read_features('USA/CA-CDPH-3000143037/2021', snpeff_parser=snpeff_parser).sort_values('POS')

    ann_columns = ['ANN.Allele', 'ANN.Annotation', 'ANN.Annotation_Impact', 'ANN.Gene_Name', 'ANN.Gene_ID',
                   'ANN.Feature_Type', 'ANN.Transcript_BioType', 'ANN.HGVS.c', 'ANN.HGVS.p']
    assert ['SAMPLE', 'CHROM', 'POS', 'REF', 'ALT', 'TYPE', 'FILE', 'VARIANT_ID',
            'ANN.Allele', 'ANN.Annotation', 'ANN.Annotation_Impact', 'ANN.Gene_Name', 'ANN.Gene_ID',
            'ANN.Feature_Type', 'ANN.Transcript_BioType', 'ANN.HGVS.c', 'ANN.HGVS.p'] == list(sample_df.columns)
    assert 31 == len(sample_df)

    vcf_name = 'USA__CA-CDPH-3000143037__2021.noann.vcf.gz'
    # (pos, ref, alt): SNVs spread over ORF1ab (ORF1a and ORF1b regions), S,
    # ORF7b and one intergenic position.
    expected_variants = [
        (3948, 'A', 'G'),
        (3037, 'C', 'T'),
        (19220, 'C', 'T'),
        (22917, 'T', 'G'),
        (27874, 'C', 'T'),
        (210, 'G', 'T'),
    ]
    for pos, ref, alt in expected_variants:
        variant = sample_df[sample_df['POS'] == pos]
        assert 1 == len(variant)
        assert ['USA/CA-CDPH-3000143037/2021', 'NC_045512.2', pos, ref, alt, 'SNP',
                vcf_name, f'NC_045512.2:{pos}:{ref}:{alt}'] == variant.drop(
            ann_columns, axis='columns').iloc[0].tolist()
        # Every annotation field must be missing.
        assert {True} == set(variant[ann_columns].iloc[0].isna().tolist())
def test_annotate(snpeff_parser):
    """Annotating a VCF against a freshly-built snpEff database writes a new
    annotated file, while the original file remains readable and unannotated."""
    with tempfile.TemporaryDirectory() as out_dir:
        database_dir = Path(out_dir)
        output_vcf_file = database_dir / 'output.vcf.gz'
        variation_file = VariationFile(snpeff_vcf_file)
        snpeff_database = SequenceFile(reference_file_5000_snpeff).create_snpeff_database(database_dir)

        annotated_variation_file = variation_file.annotate(snpeff_database=snpeff_database,
                                                           annotated_vcf=output_vcf_file)
        assert output_vcf_file == annotated_variation_file.file

        # The annotated output carries snpEff ANN.* values for both variants.
        annotated_df = annotated_variation_file.read_features(
            'SampleA', snpeff_parser=snpeff_parser).sort_values('POS')
        assert 2 == len(annotated_df)
        assert ['SAMPLE', 'CHROM', 'POS', 'REF', 'ALT', 'TYPE', 'FILE', 'VARIANT_ID',
                'ANN.Allele', 'ANN.Annotation', 'ANN.Annotation_Impact', 'ANN.Gene_Name', 'ANN.Gene_ID',
                'ANN.Feature_Type', 'ANN.Transcript_BioType', 'ANN.HGVS.c', 'ANN.HGVS.p'] == list(
            annotated_df.columns)
        assert ['NC_011083.1:195:C:G', 'NC_011083.1:207:C:G'] == annotated_df['VARIANT_ID'].tolist()
        assert ['SNP', 'SNP'] == annotated_df['TYPE'].tolist()
        assert ['missense_variant', 'synonymous_variant'] == annotated_df['ANN.Annotation'].tolist()
        assert ['SEHA_RS00560', 'SEHA_RS00560'] == annotated_df['ANN.Gene_ID'].tolist()
        assert ['thrL', 'thrL'] == annotated_df['ANN.Gene_Name'].tolist()
        assert ['c.6C>G', 'c.18C>G'] == annotated_df['ANN.HGVS.c'].tolist()
        assert ['p.N2K', 'p.T6T'] == annotated_df['ANN.HGVS.p'].tolist()

        # The input file is untouched: same variants, no ANN.* content.
        plain_df = variation_file.read_features('SampleA', snpeff_parser=snpeff_parser).sort_values('POS')
        assert 2 == len(plain_df)
        assert ['SAMPLE', 'CHROM', 'POS', 'REF', 'ALT', 'TYPE', 'FILE', 'VARIANT_ID',
                'ANN.Allele', 'ANN.Annotation', 'ANN.Annotation_Impact', 'ANN.Gene_Name', 'ANN.Gene_ID',
                'ANN.Feature_Type', 'ANN.Transcript_BioType', 'ANN.HGVS.c', 'ANN.HGVS.p'] == list(
            plain_df.columns)
        assert ['NC_011083.1:195:C:G', 'NC_011083.1:207:C:G'] == plain_df['VARIANT_ID'].tolist()
        assert ['SNP', 'SNP'] == plain_df['TYPE'].tolist()
        assert all(plain_df['ANN.Annotation'].isna())
        assert all(plain_df['ANN.Gene_ID'].isna())
        assert all(plain_df['ANN.HGVS.c'].isna())
        assert all(plain_df['ANN.HGVS.p'].isna())
| 52.340944
| 120
| 0.656043
| 4,821
| 34,388
| 4.411948
| 0.064924
| 0.056276
| 0.053597
| 0.041467
| 0.874048
| 0.844429
| 0.81166
| 0.766902
| 0.741514
| 0.728209
| 0
| 0.077937
| 0.168315
| 34,388
| 656
| 121
| 52.420732
| 0.665769
| 0.018495
| 0
| 0.597586
| 0
| 0
| 0.262654
| 0.083262
| 0
| 0
| 0
| 0
| 0.428571
| 1
| 0.04829
| false
| 0
| 0.024145
| 0.002012
| 0.074447
| 0.008048
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
380877c8ff4a71bf178da3cd1d696808024ca34b
| 128
|
py
|
Python
|
anvil/sub_rig_templates/biped_leg.py
|
AndresMWeber/Anvil
|
9cd202183ac998983c2bf6e55cc46bbc0ca1a78e
|
[
"Apache-2.0"
] | 3
|
2019-11-22T04:38:06.000Z
|
2022-01-19T08:27:18.000Z
|
anvil/sub_rig_templates/biped_leg.py
|
AndresMWeber/Anvil
|
9cd202183ac998983c2bf6e55cc46bbc0ca1a78e
|
[
"Apache-2.0"
] | 28
|
2018-02-01T20:39:42.000Z
|
2018-04-26T17:25:23.000Z
|
anvil/sub_rig_templates/biped_leg.py
|
AndresMWeber/Anvil
|
9cd202183ac998983c2bf6e55cc46bbc0ca1a78e
|
[
"Apache-2.0"
] | 1
|
2018-03-11T06:47:26.000Z
|
2018-03-11T06:47:26.000Z
|
from limb import Limb
class BipedLeg(Limb):
    """Sub-rig template for a biped leg.

    A Limb whose built-in metadata sets the default name to 'leg'
    (merged on top of Limb's BUILT_IN_META_DATA with new=True).
    """
    BUILT_IN_META_DATA = Limb.BUILT_IN_META_DATA.merge({'name': 'leg'}, new=True)
| 21.333333
| 81
| 0.734375
| 21
| 128
| 4.190476
| 0.666667
| 0.204545
| 0.25
| 0.340909
| 0.431818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132813
| 128
| 5
| 82
| 25.6
| 0.792793
| 0
| 0
| 0
| 0
| 0
| 0.054688
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
380e954db5d10829faf1e4c91d126ec2e099c3b3
| 40,282
|
py
|
Python
|
sdk/python/pulumi_aws_native/dynamodb/_inputs.py
|
pulumi/pulumi-aws-native
|
1ae4a4d9c2256b2a79ca536f8d8497b28d10e4c3
|
[
"Apache-2.0"
] | 29
|
2021-09-30T19:32:07.000Z
|
2022-03-22T21:06:08.000Z
|
sdk/python/pulumi_aws_native/dynamodb/_inputs.py
|
pulumi/pulumi-aws-native
|
1ae4a4d9c2256b2a79ca536f8d8497b28d10e4c3
|
[
"Apache-2.0"
] | 232
|
2021-09-30T19:26:26.000Z
|
2022-03-31T23:22:06.000Z
|
sdk/python/pulumi_aws_native/dynamodb/_inputs.py
|
pulumi/pulumi-aws-native
|
1ae4a4d9c2256b2a79ca536f8d8497b28d10e4c3
|
[
"Apache-2.0"
] | 4
|
2021-11-10T19:42:01.000Z
|
2022-02-05T10:15:49.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
# Public API of this generated module: the DynamoDB GlobalTable/Table input types.
__all__ = [
    'GlobalTableAttributeDefinitionArgs',
    'GlobalTableCapacityAutoScalingSettingsArgs',
    'GlobalTableContributorInsightsSpecificationArgs',
    'GlobalTableGlobalSecondaryIndexArgs',
    'GlobalTableKeySchemaArgs',
    'GlobalTableLocalSecondaryIndexArgs',
    'GlobalTablePointInTimeRecoverySpecificationArgs',
    'GlobalTableProjectionArgs',
    'GlobalTableReadProvisionedThroughputSettingsArgs',
    'GlobalTableReplicaGlobalSecondaryIndexSpecificationArgs',
    'GlobalTableReplicaSSESpecificationArgs',
    'GlobalTableReplicaSpecificationArgs',
    'GlobalTableSSESpecificationArgs',
    'GlobalTableStreamSpecificationArgs',
    'GlobalTableTagArgs',
    'GlobalTableTargetTrackingScalingPolicyConfigurationArgs',
    'GlobalTableTimeToLiveSpecificationArgs',
    'GlobalTableWriteProvisionedThroughputSettingsArgs',
    'TableAttributeDefinitionArgs',
    'TableContributorInsightsSpecificationArgs',
    'TableGlobalSecondaryIndexArgs',
    'TableKeySchemaArgs',
    'TableKinesisStreamSpecificationArgs',
    'TableLocalSecondaryIndexArgs',
    'TablePointInTimeRecoverySpecificationArgs',
    'TableProjectionArgs',
    'TableProvisionedThroughputArgs',
    'TableSSESpecificationArgs',
    'TableStreamSpecificationArgs',
    'TableTagArgs',
    'TableTimeToLiveSpecificationArgs',
]
@pulumi.input_type
class GlobalTableAttributeDefinitionArgs:
    """Input args for a GlobalTable attribute definition (attribute name + type).

    Generated code: state is stored via pulumi.set/pulumi.get rather than
    instance attributes; do not edit by hand.
    """

    def __init__(__self__, *,
                 attribute_name: pulumi.Input[str],
                 attribute_type: pulumi.Input[str]):
        pulumi.set(__self__, "attribute_name", attribute_name)
        pulumi.set(__self__, "attribute_type", attribute_type)

    @property
    @pulumi.getter(name="attributeName")
    def attribute_name(self) -> pulumi.Input[str]:
        return pulumi.get(self, "attribute_name")

    @attribute_name.setter
    def attribute_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "attribute_name", value)

    @property
    @pulumi.getter(name="attributeType")
    def attribute_type(self) -> pulumi.Input[str]:
        return pulumi.get(self, "attribute_type")

    @attribute_type.setter
    def attribute_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "attribute_type", value)
@pulumi.input_type
class GlobalTableCapacityAutoScalingSettingsArgs:
    """Input args for GlobalTable capacity auto-scaling.

    Required: max_capacity, min_capacity and a target-tracking scaling policy
    configuration. Optional: seed_capacity (only stored when provided).
    Generated code; do not edit by hand.
    """

    def __init__(__self__, *,
                 max_capacity: pulumi.Input[int],
                 min_capacity: pulumi.Input[int],
                 target_tracking_scaling_policy_configuration: pulumi.Input['GlobalTableTargetTrackingScalingPolicyConfigurationArgs'],
                 seed_capacity: Optional[pulumi.Input[int]] = None):
        pulumi.set(__self__, "max_capacity", max_capacity)
        pulumi.set(__self__, "min_capacity", min_capacity)
        pulumi.set(__self__, "target_tracking_scaling_policy_configuration", target_tracking_scaling_policy_configuration)
        # Optional field: only set when a value was supplied.
        if seed_capacity is not None:
            pulumi.set(__self__, "seed_capacity", seed_capacity)

    @property
    @pulumi.getter(name="maxCapacity")
    def max_capacity(self) -> pulumi.Input[int]:
        return pulumi.get(self, "max_capacity")

    @max_capacity.setter
    def max_capacity(self, value: pulumi.Input[int]):
        pulumi.set(self, "max_capacity", value)

    @property
    @pulumi.getter(name="minCapacity")
    def min_capacity(self) -> pulumi.Input[int]:
        return pulumi.get(self, "min_capacity")

    @min_capacity.setter
    def min_capacity(self, value: pulumi.Input[int]):
        pulumi.set(self, "min_capacity", value)

    @property
    @pulumi.getter(name="targetTrackingScalingPolicyConfiguration")
    def target_tracking_scaling_policy_configuration(self) -> pulumi.Input['GlobalTableTargetTrackingScalingPolicyConfigurationArgs']:
        return pulumi.get(self, "target_tracking_scaling_policy_configuration")

    @target_tracking_scaling_policy_configuration.setter
    def target_tracking_scaling_policy_configuration(self, value: pulumi.Input['GlobalTableTargetTrackingScalingPolicyConfigurationArgs']):
        pulumi.set(self, "target_tracking_scaling_policy_configuration", value)

    @property
    @pulumi.getter(name="seedCapacity")
    def seed_capacity(self) -> Optional[pulumi.Input[int]]:
        return pulumi.get(self, "seed_capacity")

    @seed_capacity.setter
    def seed_capacity(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "seed_capacity", value)
@pulumi.input_type
class GlobalTableContributorInsightsSpecificationArgs:
    """Input args toggling contributor insights (single required bool).

    Generated code; do not edit by hand.
    """

    def __init__(__self__, *,
                 enabled: pulumi.Input[bool]):
        pulumi.set(__self__, "enabled", enabled)

    @property
    @pulumi.getter
    def enabled(self) -> pulumi.Input[bool]:
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, value: pulumi.Input[bool]):
        pulumi.set(self, "enabled", value)
@pulumi.input_type
class GlobalTableGlobalSecondaryIndexArgs:
    """Input args for a GlobalTable global secondary index.

    Required: index_name, key_schema, projection.
    Optional: write_provisioned_throughput_settings.
    Generated code; do not edit by hand.
    """

    def __init__(__self__, *,
                 index_name: pulumi.Input[str],
                 key_schema: pulumi.Input[Sequence[pulumi.Input['GlobalTableKeySchemaArgs']]],
                 projection: pulumi.Input['GlobalTableProjectionArgs'],
                 write_provisioned_throughput_settings: Optional[pulumi.Input['GlobalTableWriteProvisionedThroughputSettingsArgs']] = None):
        pulumi.set(__self__, "index_name", index_name)
        pulumi.set(__self__, "key_schema", key_schema)
        pulumi.set(__self__, "projection", projection)
        # Optional field: only set when a value was supplied.
        if write_provisioned_throughput_settings is not None:
            pulumi.set(__self__, "write_provisioned_throughput_settings", write_provisioned_throughput_settings)

    @property
    @pulumi.getter(name="indexName")
    def index_name(self) -> pulumi.Input[str]:
        return pulumi.get(self, "index_name")

    @index_name.setter
    def index_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "index_name", value)

    @property
    @pulumi.getter(name="keySchema")
    def key_schema(self) -> pulumi.Input[Sequence[pulumi.Input['GlobalTableKeySchemaArgs']]]:
        return pulumi.get(self, "key_schema")

    @key_schema.setter
    def key_schema(self, value: pulumi.Input[Sequence[pulumi.Input['GlobalTableKeySchemaArgs']]]):
        pulumi.set(self, "key_schema", value)

    @property
    @pulumi.getter
    def projection(self) -> pulumi.Input['GlobalTableProjectionArgs']:
        return pulumi.get(self, "projection")

    @projection.setter
    def projection(self, value: pulumi.Input['GlobalTableProjectionArgs']):
        pulumi.set(self, "projection", value)

    @property
    @pulumi.getter(name="writeProvisionedThroughputSettings")
    def write_provisioned_throughput_settings(self) -> Optional[pulumi.Input['GlobalTableWriteProvisionedThroughputSettingsArgs']]:
        return pulumi.get(self, "write_provisioned_throughput_settings")

    @write_provisioned_throughput_settings.setter
    def write_provisioned_throughput_settings(self, value: Optional[pulumi.Input['GlobalTableWriteProvisionedThroughputSettingsArgs']]):
        pulumi.set(self, "write_provisioned_throughput_settings", value)
@pulumi.input_type
class GlobalTableKeySchemaArgs:
    """Input args for one key-schema element (attribute name + key type).

    Generated code; do not edit by hand.
    """

    def __init__(__self__, *,
                 attribute_name: pulumi.Input[str],
                 key_type: pulumi.Input[str]):
        pulumi.set(__self__, "attribute_name", attribute_name)
        pulumi.set(__self__, "key_type", key_type)

    @property
    @pulumi.getter(name="attributeName")
    def attribute_name(self) -> pulumi.Input[str]:
        return pulumi.get(self, "attribute_name")

    @attribute_name.setter
    def attribute_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "attribute_name", value)

    @property
    @pulumi.getter(name="keyType")
    def key_type(self) -> pulumi.Input[str]:
        return pulumi.get(self, "key_type")

    @key_type.setter
    def key_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "key_type", value)
@pulumi.input_type
class GlobalTableLocalSecondaryIndexArgs:
    """Input args for a GlobalTable local secondary index.

    Required: index_name, key_schema, projection.
    Generated code; do not edit by hand.
    """

    def __init__(__self__, *,
                 index_name: pulumi.Input[str],
                 key_schema: pulumi.Input[Sequence[pulumi.Input['GlobalTableKeySchemaArgs']]],
                 projection: pulumi.Input['GlobalTableProjectionArgs']):
        pulumi.set(__self__, "index_name", index_name)
        pulumi.set(__self__, "key_schema", key_schema)
        pulumi.set(__self__, "projection", projection)

    @property
    @pulumi.getter(name="indexName")
    def index_name(self) -> pulumi.Input[str]:
        return pulumi.get(self, "index_name")

    @index_name.setter
    def index_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "index_name", value)

    @property
    @pulumi.getter(name="keySchema")
    def key_schema(self) -> pulumi.Input[Sequence[pulumi.Input['GlobalTableKeySchemaArgs']]]:
        return pulumi.get(self, "key_schema")

    @key_schema.setter
    def key_schema(self, value: pulumi.Input[Sequence[pulumi.Input['GlobalTableKeySchemaArgs']]]):
        pulumi.set(self, "key_schema", value)

    @property
    @pulumi.getter
    def projection(self) -> pulumi.Input['GlobalTableProjectionArgs']:
        return pulumi.get(self, "projection")

    @projection.setter
    def projection(self, value: pulumi.Input['GlobalTableProjectionArgs']):
        pulumi.set(self, "projection", value)
@pulumi.input_type
class GlobalTablePointInTimeRecoverySpecificationArgs:
    """Input args for point-in-time recovery (one optional bool flag).

    Generated code; do not edit by hand.
    """

    def __init__(__self__, *,
                 point_in_time_recovery_enabled: Optional[pulumi.Input[bool]] = None):
        # Optional field: only set when a value was supplied.
        if point_in_time_recovery_enabled is not None:
            pulumi.set(__self__, "point_in_time_recovery_enabled", point_in_time_recovery_enabled)

    @property
    @pulumi.getter(name="pointInTimeRecoveryEnabled")
    def point_in_time_recovery_enabled(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "point_in_time_recovery_enabled")

    @point_in_time_recovery_enabled.setter
    def point_in_time_recovery_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "point_in_time_recovery_enabled", value)
@pulumi.input_type
class GlobalTableProjectionArgs:
    """Input args for an index projection (both fields optional).

    Generated code; do not edit by hand.
    """

    def __init__(__self__, *,
                 non_key_attributes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 projection_type: Optional[pulumi.Input[str]] = None):
        # Optional fields: only set when a value was supplied.
        if non_key_attributes is not None:
            pulumi.set(__self__, "non_key_attributes", non_key_attributes)
        if projection_type is not None:
            pulumi.set(__self__, "projection_type", projection_type)

    @property
    @pulumi.getter(name="nonKeyAttributes")
    def non_key_attributes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        return pulumi.get(self, "non_key_attributes")

    @non_key_attributes.setter
    def non_key_attributes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "non_key_attributes", value)

    @property
    @pulumi.getter(name="projectionType")
    def projection_type(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "projection_type")

    @projection_type.setter
    def projection_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "projection_type", value)
@pulumi.input_type
class GlobalTableReadProvisionedThroughputSettingsArgs:
    """Input args for read throughput: auto-scaling settings or fixed read units.

    Both fields optional. Generated code; do not edit by hand.
    """

    def __init__(__self__, *,
                 read_capacity_auto_scaling_settings: Optional[pulumi.Input['GlobalTableCapacityAutoScalingSettingsArgs']] = None,
                 read_capacity_units: Optional[pulumi.Input[int]] = None):
        # Optional fields: only set when a value was supplied.
        if read_capacity_auto_scaling_settings is not None:
            pulumi.set(__self__, "read_capacity_auto_scaling_settings", read_capacity_auto_scaling_settings)
        if read_capacity_units is not None:
            pulumi.set(__self__, "read_capacity_units", read_capacity_units)

    @property
    @pulumi.getter(name="readCapacityAutoScalingSettings")
    def read_capacity_auto_scaling_settings(self) -> Optional[pulumi.Input['GlobalTableCapacityAutoScalingSettingsArgs']]:
        return pulumi.get(self, "read_capacity_auto_scaling_settings")

    @read_capacity_auto_scaling_settings.setter
    def read_capacity_auto_scaling_settings(self, value: Optional[pulumi.Input['GlobalTableCapacityAutoScalingSettingsArgs']]):
        pulumi.set(self, "read_capacity_auto_scaling_settings", value)

    @property
    @pulumi.getter(name="readCapacityUnits")
    def read_capacity_units(self) -> Optional[pulumi.Input[int]]:
        return pulumi.get(self, "read_capacity_units")

    @read_capacity_units.setter
    def read_capacity_units(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "read_capacity_units", value)
@pulumi.input_type
class GlobalTableReplicaGlobalSecondaryIndexSpecificationArgs:
    """Input args for a replica's view of a global secondary index.

    Required: index_name. Optional: contributor insights spec and read
    provisioned throughput settings. Generated code; do not edit by hand.
    """

    def __init__(__self__, *,
                 index_name: pulumi.Input[str],
                 contributor_insights_specification: Optional[pulumi.Input['GlobalTableContributorInsightsSpecificationArgs']] = None,
                 read_provisioned_throughput_settings: Optional[pulumi.Input['GlobalTableReadProvisionedThroughputSettingsArgs']] = None):
        pulumi.set(__self__, "index_name", index_name)
        # Optional fields: only set when a value was supplied.
        if contributor_insights_specification is not None:
            pulumi.set(__self__, "contributor_insights_specification", contributor_insights_specification)
        if read_provisioned_throughput_settings is not None:
            pulumi.set(__self__, "read_provisioned_throughput_settings", read_provisioned_throughput_settings)

    @property
    @pulumi.getter(name="indexName")
    def index_name(self) -> pulumi.Input[str]:
        return pulumi.get(self, "index_name")

    @index_name.setter
    def index_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "index_name", value)

    @property
    @pulumi.getter(name="contributorInsightsSpecification")
    def contributor_insights_specification(self) -> Optional[pulumi.Input['GlobalTableContributorInsightsSpecificationArgs']]:
        return pulumi.get(self, "contributor_insights_specification")

    @contributor_insights_specification.setter
    def contributor_insights_specification(self, value: Optional[pulumi.Input['GlobalTableContributorInsightsSpecificationArgs']]):
        pulumi.set(self, "contributor_insights_specification", value)

    @property
    @pulumi.getter(name="readProvisionedThroughputSettings")
    def read_provisioned_throughput_settings(self) -> Optional[pulumi.Input['GlobalTableReadProvisionedThroughputSettingsArgs']]:
        return pulumi.get(self, "read_provisioned_throughput_settings")

    @read_provisioned_throughput_settings.setter
    def read_provisioned_throughput_settings(self, value: Optional[pulumi.Input['GlobalTableReadProvisionedThroughputSettingsArgs']]):
        pulumi.set(self, "read_provisioned_throughput_settings", value)
@pulumi.input_type
class GlobalTableReplicaSSESpecificationArgs:
    """Input args for replica server-side encryption (required KMS master key id).

    Generated code; do not edit by hand.
    """

    def __init__(__self__, *,
                 k_ms_master_key_id: pulumi.Input[str]):
        pulumi.set(__self__, "k_ms_master_key_id", k_ms_master_key_id)

    @property
    @pulumi.getter(name="kMSMasterKeyId")
    def k_ms_master_key_id(self) -> pulumi.Input[str]:
        return pulumi.get(self, "k_ms_master_key_id")

    @k_ms_master_key_id.setter
    def k_ms_master_key_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "k_ms_master_key_id", value)
@pulumi.input_type
class GlobalTableReplicaSpecificationArgs:
def __init__(__self__, *,
region: pulumi.Input[str],
contributor_insights_specification: Optional[pulumi.Input['GlobalTableContributorInsightsSpecificationArgs']] = None,
global_secondary_indexes: Optional[pulumi.Input[Sequence[pulumi.Input['GlobalTableReplicaGlobalSecondaryIndexSpecificationArgs']]]] = None,
point_in_time_recovery_specification: Optional[pulumi.Input['GlobalTablePointInTimeRecoverySpecificationArgs']] = None,
read_provisioned_throughput_settings: Optional[pulumi.Input['GlobalTableReadProvisionedThroughputSettingsArgs']] = None,
s_se_specification: Optional[pulumi.Input['GlobalTableReplicaSSESpecificationArgs']] = None,
table_class: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input['GlobalTableTagArgs']]]] = None):
pulumi.set(__self__, "region", region)
if contributor_insights_specification is not None:
pulumi.set(__self__, "contributor_insights_specification", contributor_insights_specification)
if global_secondary_indexes is not None:
pulumi.set(__self__, "global_secondary_indexes", global_secondary_indexes)
if point_in_time_recovery_specification is not None:
pulumi.set(__self__, "point_in_time_recovery_specification", point_in_time_recovery_specification)
if read_provisioned_throughput_settings is not None:
pulumi.set(__self__, "read_provisioned_throughput_settings", read_provisioned_throughput_settings)
if s_se_specification is not None:
pulumi.set(__self__, "s_se_specification", s_se_specification)
if table_class is not None:
pulumi.set(__self__, "table_class", table_class)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter
def region(self) -> pulumi.Input[str]:
return pulumi.get(self, "region")
@region.setter
def region(self, value: pulumi.Input[str]):
pulumi.set(self, "region", value)
@property
@pulumi.getter(name="contributorInsightsSpecification")
def contributor_insights_specification(self) -> Optional[pulumi.Input['GlobalTableContributorInsightsSpecificationArgs']]:
return pulumi.get(self, "contributor_insights_specification")
@contributor_insights_specification.setter
def contributor_insights_specification(self, value: Optional[pulumi.Input['GlobalTableContributorInsightsSpecificationArgs']]):
pulumi.set(self, "contributor_insights_specification", value)
@property
@pulumi.getter(name="globalSecondaryIndexes")
def global_secondary_indexes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['GlobalTableReplicaGlobalSecondaryIndexSpecificationArgs']]]]:
return pulumi.get(self, "global_secondary_indexes")
@global_secondary_indexes.setter
def global_secondary_indexes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['GlobalTableReplicaGlobalSecondaryIndexSpecificationArgs']]]]):
pulumi.set(self, "global_secondary_indexes", value)
@property
@pulumi.getter(name="pointInTimeRecoverySpecification")
def point_in_time_recovery_specification(self) -> Optional[pulumi.Input['GlobalTablePointInTimeRecoverySpecificationArgs']]:
return pulumi.get(self, "point_in_time_recovery_specification")
@point_in_time_recovery_specification.setter
def point_in_time_recovery_specification(self, value: Optional[pulumi.Input['GlobalTablePointInTimeRecoverySpecificationArgs']]):
pulumi.set(self, "point_in_time_recovery_specification", value)
@property
@pulumi.getter(name="readProvisionedThroughputSettings")
def read_provisioned_throughput_settings(self) -> Optional[pulumi.Input['GlobalTableReadProvisionedThroughputSettingsArgs']]:
return pulumi.get(self, "read_provisioned_throughput_settings")
@read_provisioned_throughput_settings.setter
def read_provisioned_throughput_settings(self, value: Optional[pulumi.Input['GlobalTableReadProvisionedThroughputSettingsArgs']]):
pulumi.set(self, "read_provisioned_throughput_settings", value)
@property
@pulumi.getter(name="sSESpecification")
def s_se_specification(self) -> Optional[pulumi.Input['GlobalTableReplicaSSESpecificationArgs']]:
return pulumi.get(self, "s_se_specification")
@s_se_specification.setter
def s_se_specification(self, value: Optional[pulumi.Input['GlobalTableReplicaSSESpecificationArgs']]):
pulumi.set(self, "s_se_specification", value)
@property
@pulumi.getter(name="tableClass")
def table_class(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "table_class")
@table_class.setter
def table_class(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "table_class", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['GlobalTableTagArgs']]]]:
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['GlobalTableTagArgs']]]]):
pulumi.set(self, "tags", value)
@pulumi.input_type
class GlobalTableSSESpecificationArgs:
    """Pulumi input type for a GlobalTable `sSESpecification`.

    Required: ``s_se_enabled`` (bool). Optional: ``s_se_type`` (str),
    only stored when not None. Values are kept via ``pulumi.set`` and
    surfaced through getter/setter properties, as the ``@pulumi.input_type``
    machinery expects.
    """
    def __init__(__self__, *,
                 s_se_enabled: pulumi.Input[bool],
                 s_se_type: Optional[pulumi.Input[str]] = None):
        pulumi.set(__self__, "s_se_enabled", s_se_enabled)
        if s_se_type is not None:
            pulumi.set(__self__, "s_se_type", s_se_type)

    @property
    @pulumi.getter(name="sSEEnabled")
    def s_se_enabled(self) -> pulumi.Input[bool]:
        return pulumi.get(self, "s_se_enabled")

    @s_se_enabled.setter
    def s_se_enabled(self, value: pulumi.Input[bool]):
        pulumi.set(self, "s_se_enabled", value)

    @property
    @pulumi.getter(name="sSEType")
    def s_se_type(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "s_se_type")

    @s_se_type.setter
    def s_se_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "s_se_type", value)
@pulumi.input_type
class GlobalTableStreamSpecificationArgs:
    """Pulumi input type for a GlobalTable `streamSpecification`.

    Single required field ``stream_view_type`` (str), exposed under the
    wire name ``streamViewType``.
    """
    def __init__(__self__, *,
                 stream_view_type: pulumi.Input[str]):
        pulumi.set(__self__, "stream_view_type", stream_view_type)

    @property
    @pulumi.getter(name="streamViewType")
    def stream_view_type(self) -> pulumi.Input[str]:
        return pulumi.get(self, "stream_view_type")

    @stream_view_type.setter
    def stream_view_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "stream_view_type", value)
@pulumi.input_type
class GlobalTableTagArgs:
    """Pulumi input type for a GlobalTable tag: a required key/value string pair."""
    def __init__(__self__, *,
                 key: pulumi.Input[str],
                 value: pulumi.Input[str]):
        pulumi.set(__self__, "key", key)
        pulumi.set(__self__, "value", value)

    @property
    @pulumi.getter
    def key(self) -> pulumi.Input[str]:
        return pulumi.get(self, "key")

    @key.setter
    def key(self, value: pulumi.Input[str]):
        pulumi.set(self, "key", value)

    @property
    @pulumi.getter
    def value(self) -> pulumi.Input[str]:
        return pulumi.get(self, "value")

    @value.setter
    def value(self, value: pulumi.Input[str]):
        pulumi.set(self, "value", value)
@pulumi.input_type
class GlobalTableTargetTrackingScalingPolicyConfigurationArgs:
    """Pulumi input type for a GlobalTable target-tracking scaling policy.

    Required: ``target_value`` (float). Optional: ``disable_scale_in``
    (bool), ``scale_in_cooldown`` and ``scale_out_cooldown`` (int) — each
    optional field is stored only when not None.
    """
    def __init__(__self__, *,
                 target_value: pulumi.Input[float],
                 disable_scale_in: Optional[pulumi.Input[bool]] = None,
                 scale_in_cooldown: Optional[pulumi.Input[int]] = None,
                 scale_out_cooldown: Optional[pulumi.Input[int]] = None):
        pulumi.set(__self__, "target_value", target_value)
        if disable_scale_in is not None:
            pulumi.set(__self__, "disable_scale_in", disable_scale_in)
        if scale_in_cooldown is not None:
            pulumi.set(__self__, "scale_in_cooldown", scale_in_cooldown)
        if scale_out_cooldown is not None:
            pulumi.set(__self__, "scale_out_cooldown", scale_out_cooldown)

    @property
    @pulumi.getter(name="targetValue")
    def target_value(self) -> pulumi.Input[float]:
        return pulumi.get(self, "target_value")

    @target_value.setter
    def target_value(self, value: pulumi.Input[float]):
        pulumi.set(self, "target_value", value)

    @property
    @pulumi.getter(name="disableScaleIn")
    def disable_scale_in(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "disable_scale_in")

    @disable_scale_in.setter
    def disable_scale_in(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "disable_scale_in", value)

    @property
    @pulumi.getter(name="scaleInCooldown")
    def scale_in_cooldown(self) -> Optional[pulumi.Input[int]]:
        return pulumi.get(self, "scale_in_cooldown")

    @scale_in_cooldown.setter
    def scale_in_cooldown(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "scale_in_cooldown", value)

    @property
    @pulumi.getter(name="scaleOutCooldown")
    def scale_out_cooldown(self) -> Optional[pulumi.Input[int]]:
        return pulumi.get(self, "scale_out_cooldown")

    @scale_out_cooldown.setter
    def scale_out_cooldown(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "scale_out_cooldown", value)
@pulumi.input_type
class GlobalTableTimeToLiveSpecificationArgs:
    """Pulumi input type for a GlobalTable TTL specification.

    Required: ``enabled`` (bool). Optional: ``attribute_name`` (str),
    stored only when not None.
    """
    def __init__(__self__, *,
                 enabled: pulumi.Input[bool],
                 attribute_name: Optional[pulumi.Input[str]] = None):
        pulumi.set(__self__, "enabled", enabled)
        if attribute_name is not None:
            pulumi.set(__self__, "attribute_name", attribute_name)

    @property
    @pulumi.getter
    def enabled(self) -> pulumi.Input[bool]:
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, value: pulumi.Input[bool]):
        pulumi.set(self, "enabled", value)

    @property
    @pulumi.getter(name="attributeName")
    def attribute_name(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "attribute_name")

    @attribute_name.setter
    def attribute_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "attribute_name", value)
@pulumi.input_type
class GlobalTableWriteProvisionedThroughputSettingsArgs:
    """Pulumi input type for GlobalTable write provisioned-throughput settings.

    Single optional field ``write_capacity_auto_scaling_settings``
    (GlobalTableCapacityAutoScalingSettingsArgs), stored only when not None.
    """
    def __init__(__self__, *,
                 write_capacity_auto_scaling_settings: Optional[pulumi.Input['GlobalTableCapacityAutoScalingSettingsArgs']] = None):
        if write_capacity_auto_scaling_settings is not None:
            pulumi.set(__self__, "write_capacity_auto_scaling_settings", write_capacity_auto_scaling_settings)

    @property
    @pulumi.getter(name="writeCapacityAutoScalingSettings")
    def write_capacity_auto_scaling_settings(self) -> Optional[pulumi.Input['GlobalTableCapacityAutoScalingSettingsArgs']]:
        return pulumi.get(self, "write_capacity_auto_scaling_settings")

    @write_capacity_auto_scaling_settings.setter
    def write_capacity_auto_scaling_settings(self, value: Optional[pulumi.Input['GlobalTableCapacityAutoScalingSettingsArgs']]):
        pulumi.set(self, "write_capacity_auto_scaling_settings", value)
@pulumi.input_type
class TableAttributeDefinitionArgs:
    """Pulumi input type for a Table attribute definition.

    Two required strings: ``attribute_name`` and ``attribute_type``.
    """
    def __init__(__self__, *,
                 attribute_name: pulumi.Input[str],
                 attribute_type: pulumi.Input[str]):
        pulumi.set(__self__, "attribute_name", attribute_name)
        pulumi.set(__self__, "attribute_type", attribute_type)

    @property
    @pulumi.getter(name="attributeName")
    def attribute_name(self) -> pulumi.Input[str]:
        return pulumi.get(self, "attribute_name")

    @attribute_name.setter
    def attribute_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "attribute_name", value)

    @property
    @pulumi.getter(name="attributeType")
    def attribute_type(self) -> pulumi.Input[str]:
        return pulumi.get(self, "attribute_type")

    @attribute_type.setter
    def attribute_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "attribute_type", value)
@pulumi.input_type
class TableContributorInsightsSpecificationArgs:
    """Pulumi input type for a Table contributor-insights specification.

    Single required field ``enabled`` (bool).
    """
    def __init__(__self__, *,
                 enabled: pulumi.Input[bool]):
        pulumi.set(__self__, "enabled", enabled)

    @property
    @pulumi.getter
    def enabled(self) -> pulumi.Input[bool]:
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, value: pulumi.Input[bool]):
        pulumi.set(self, "enabled", value)
@pulumi.input_type
class TableGlobalSecondaryIndexArgs:
    """Pulumi input type for a Table global secondary index.

    Required: ``index_name`` (str), ``key_schema`` (sequence of
    TableKeySchemaArgs), ``projection`` (TableProjectionArgs).
    Optional: ``contributor_insights_specification`` and
    ``provisioned_throughput`` — stored only when not None.
    """
    def __init__(__self__, *,
                 index_name: pulumi.Input[str],
                 key_schema: pulumi.Input[Sequence[pulumi.Input['TableKeySchemaArgs']]],
                 projection: pulumi.Input['TableProjectionArgs'],
                 contributor_insights_specification: Optional[pulumi.Input['TableContributorInsightsSpecificationArgs']] = None,
                 provisioned_throughput: Optional[pulumi.Input['TableProvisionedThroughputArgs']] = None):
        pulumi.set(__self__, "index_name", index_name)
        pulumi.set(__self__, "key_schema", key_schema)
        pulumi.set(__self__, "projection", projection)
        if contributor_insights_specification is not None:
            pulumi.set(__self__, "contributor_insights_specification", contributor_insights_specification)
        if provisioned_throughput is not None:
            pulumi.set(__self__, "provisioned_throughput", provisioned_throughput)

    @property
    @pulumi.getter(name="indexName")
    def index_name(self) -> pulumi.Input[str]:
        return pulumi.get(self, "index_name")

    @index_name.setter
    def index_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "index_name", value)

    @property
    @pulumi.getter(name="keySchema")
    def key_schema(self) -> pulumi.Input[Sequence[pulumi.Input['TableKeySchemaArgs']]]:
        return pulumi.get(self, "key_schema")

    @key_schema.setter
    def key_schema(self, value: pulumi.Input[Sequence[pulumi.Input['TableKeySchemaArgs']]]):
        pulumi.set(self, "key_schema", value)

    @property
    @pulumi.getter
    def projection(self) -> pulumi.Input['TableProjectionArgs']:
        return pulumi.get(self, "projection")

    @projection.setter
    def projection(self, value: pulumi.Input['TableProjectionArgs']):
        pulumi.set(self, "projection", value)

    @property
    @pulumi.getter(name="contributorInsightsSpecification")
    def contributor_insights_specification(self) -> Optional[pulumi.Input['TableContributorInsightsSpecificationArgs']]:
        return pulumi.get(self, "contributor_insights_specification")

    @contributor_insights_specification.setter
    def contributor_insights_specification(self, value: Optional[pulumi.Input['TableContributorInsightsSpecificationArgs']]):
        pulumi.set(self, "contributor_insights_specification", value)

    @property
    @pulumi.getter(name="provisionedThroughput")
    def provisioned_throughput(self) -> Optional[pulumi.Input['TableProvisionedThroughputArgs']]:
        return pulumi.get(self, "provisioned_throughput")

    @provisioned_throughput.setter
    def provisioned_throughput(self, value: Optional[pulumi.Input['TableProvisionedThroughputArgs']]):
        pulumi.set(self, "provisioned_throughput", value)
@pulumi.input_type
class TableKeySchemaArgs:
    """Pulumi input type for a Table key-schema element.

    Two required strings: ``attribute_name`` and ``key_type``.
    """
    def __init__(__self__, *,
                 attribute_name: pulumi.Input[str],
                 key_type: pulumi.Input[str]):
        pulumi.set(__self__, "attribute_name", attribute_name)
        pulumi.set(__self__, "key_type", key_type)

    @property
    @pulumi.getter(name="attributeName")
    def attribute_name(self) -> pulumi.Input[str]:
        return pulumi.get(self, "attribute_name")

    @attribute_name.setter
    def attribute_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "attribute_name", value)

    @property
    @pulumi.getter(name="keyType")
    def key_type(self) -> pulumi.Input[str]:
        return pulumi.get(self, "key_type")

    @key_type.setter
    def key_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "key_type", value)
@pulumi.input_type
class TableKinesisStreamSpecificationArgs:
    """Pulumi input type for a Table Kinesis-stream specification.

    Single required field ``stream_arn`` (str), wire name ``streamArn``.
    """
    def __init__(__self__, *,
                 stream_arn: pulumi.Input[str]):
        pulumi.set(__self__, "stream_arn", stream_arn)

    @property
    @pulumi.getter(name="streamArn")
    def stream_arn(self) -> pulumi.Input[str]:
        return pulumi.get(self, "stream_arn")

    @stream_arn.setter
    def stream_arn(self, value: pulumi.Input[str]):
        pulumi.set(self, "stream_arn", value)
@pulumi.input_type
class TableLocalSecondaryIndexArgs:
    """Pulumi input type for a Table local secondary index.

    All three fields required: ``index_name`` (str), ``key_schema``
    (sequence of TableKeySchemaArgs), ``projection`` (TableProjectionArgs).
    """
    def __init__(__self__, *,
                 index_name: pulumi.Input[str],
                 key_schema: pulumi.Input[Sequence[pulumi.Input['TableKeySchemaArgs']]],
                 projection: pulumi.Input['TableProjectionArgs']):
        pulumi.set(__self__, "index_name", index_name)
        pulumi.set(__self__, "key_schema", key_schema)
        pulumi.set(__self__, "projection", projection)

    @property
    @pulumi.getter(name="indexName")
    def index_name(self) -> pulumi.Input[str]:
        return pulumi.get(self, "index_name")

    @index_name.setter
    def index_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "index_name", value)

    @property
    @pulumi.getter(name="keySchema")
    def key_schema(self) -> pulumi.Input[Sequence[pulumi.Input['TableKeySchemaArgs']]]:
        return pulumi.get(self, "key_schema")

    @key_schema.setter
    def key_schema(self, value: pulumi.Input[Sequence[pulumi.Input['TableKeySchemaArgs']]]):
        pulumi.set(self, "key_schema", value)

    @property
    @pulumi.getter
    def projection(self) -> pulumi.Input['TableProjectionArgs']:
        return pulumi.get(self, "projection")

    @projection.setter
    def projection(self, value: pulumi.Input['TableProjectionArgs']):
        pulumi.set(self, "projection", value)
@pulumi.input_type
class TablePointInTimeRecoverySpecificationArgs:
    """Pulumi input type for a Table point-in-time-recovery specification.

    Single optional field ``point_in_time_recovery_enabled`` (bool),
    stored only when not None.
    """
    def __init__(__self__, *,
                 point_in_time_recovery_enabled: Optional[pulumi.Input[bool]] = None):
        if point_in_time_recovery_enabled is not None:
            pulumi.set(__self__, "point_in_time_recovery_enabled", point_in_time_recovery_enabled)

    @property
    @pulumi.getter(name="pointInTimeRecoveryEnabled")
    def point_in_time_recovery_enabled(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "point_in_time_recovery_enabled")

    @point_in_time_recovery_enabled.setter
    def point_in_time_recovery_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "point_in_time_recovery_enabled", value)
@pulumi.input_type
class TableProjectionArgs:
    """Pulumi input type for a Table index projection.

    Both fields optional and stored only when not None:
    ``non_key_attributes`` (sequence of str) and ``projection_type`` (str).
    """
    def __init__(__self__, *,
                 non_key_attributes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 projection_type: Optional[pulumi.Input[str]] = None):
        if non_key_attributes is not None:
            pulumi.set(__self__, "non_key_attributes", non_key_attributes)
        if projection_type is not None:
            pulumi.set(__self__, "projection_type", projection_type)

    @property
    @pulumi.getter(name="nonKeyAttributes")
    def non_key_attributes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        return pulumi.get(self, "non_key_attributes")

    @non_key_attributes.setter
    def non_key_attributes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "non_key_attributes", value)

    @property
    @pulumi.getter(name="projectionType")
    def projection_type(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "projection_type")

    @projection_type.setter
    def projection_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "projection_type", value)
@pulumi.input_type
class TableProvisionedThroughputArgs:
    """Pulumi input type for Table provisioned throughput.

    Two required ints: ``read_capacity_units`` and ``write_capacity_units``.
    """
    def __init__(__self__, *,
                 read_capacity_units: pulumi.Input[int],
                 write_capacity_units: pulumi.Input[int]):
        pulumi.set(__self__, "read_capacity_units", read_capacity_units)
        pulumi.set(__self__, "write_capacity_units", write_capacity_units)

    @property
    @pulumi.getter(name="readCapacityUnits")
    def read_capacity_units(self) -> pulumi.Input[int]:
        return pulumi.get(self, "read_capacity_units")

    @read_capacity_units.setter
    def read_capacity_units(self, value: pulumi.Input[int]):
        pulumi.set(self, "read_capacity_units", value)

    @property
    @pulumi.getter(name="writeCapacityUnits")
    def write_capacity_units(self) -> pulumi.Input[int]:
        return pulumi.get(self, "write_capacity_units")

    @write_capacity_units.setter
    def write_capacity_units(self, value: pulumi.Input[int]):
        pulumi.set(self, "write_capacity_units", value)
@pulumi.input_type
class TableSSESpecificationArgs:
    """Pulumi input type for a Table `sSESpecification`.

    Required: ``s_se_enabled`` (bool). Optional (stored only when not
    None): ``k_ms_master_key_id`` (str, wire name ``kMSMasterKeyId``) and
    ``s_se_type`` (str, wire name ``sSEType``).
    """
    def __init__(__self__, *,
                 s_se_enabled: pulumi.Input[bool],
                 k_ms_master_key_id: Optional[pulumi.Input[str]] = None,
                 s_se_type: Optional[pulumi.Input[str]] = None):
        pulumi.set(__self__, "s_se_enabled", s_se_enabled)
        if k_ms_master_key_id is not None:
            pulumi.set(__self__, "k_ms_master_key_id", k_ms_master_key_id)
        if s_se_type is not None:
            pulumi.set(__self__, "s_se_type", s_se_type)

    @property
    @pulumi.getter(name="sSEEnabled")
    def s_se_enabled(self) -> pulumi.Input[bool]:
        return pulumi.get(self, "s_se_enabled")

    @s_se_enabled.setter
    def s_se_enabled(self, value: pulumi.Input[bool]):
        pulumi.set(self, "s_se_enabled", value)

    @property
    @pulumi.getter(name="kMSMasterKeyId")
    def k_ms_master_key_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "k_ms_master_key_id")

    @k_ms_master_key_id.setter
    def k_ms_master_key_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "k_ms_master_key_id", value)

    @property
    @pulumi.getter(name="sSEType")
    def s_se_type(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "s_se_type")

    @s_se_type.setter
    def s_se_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "s_se_type", value)
@pulumi.input_type
class TableStreamSpecificationArgs:
    """Pulumi input type for a Table `streamSpecification`.

    Single required field ``stream_view_type`` (str).
    """
    def __init__(__self__, *,
                 stream_view_type: pulumi.Input[str]):
        pulumi.set(__self__, "stream_view_type", stream_view_type)

    @property
    @pulumi.getter(name="streamViewType")
    def stream_view_type(self) -> pulumi.Input[str]:
        return pulumi.get(self, "stream_view_type")

    @stream_view_type.setter
    def stream_view_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "stream_view_type", value)
@pulumi.input_type
class TableTagArgs:
    """Pulumi input type for a Table tag: a required key/value string pair."""
    def __init__(__self__, *,
                 key: pulumi.Input[str],
                 value: pulumi.Input[str]):
        pulumi.set(__self__, "key", key)
        pulumi.set(__self__, "value", value)

    @property
    @pulumi.getter
    def key(self) -> pulumi.Input[str]:
        return pulumi.get(self, "key")

    @key.setter
    def key(self, value: pulumi.Input[str]):
        pulumi.set(self, "key", value)

    @property
    @pulumi.getter
    def value(self) -> pulumi.Input[str]:
        return pulumi.get(self, "value")

    @value.setter
    def value(self, value: pulumi.Input[str]):
        pulumi.set(self, "value", value)
@pulumi.input_type
class TableTimeToLiveSpecificationArgs:
    """Pulumi input type for a Table TTL specification.

    Both fields required here (unlike the GlobalTable variant, where
    ``attribute_name`` is optional): ``attribute_name`` (str) and
    ``enabled`` (bool).
    """
    def __init__(__self__, *,
                 attribute_name: pulumi.Input[str],
                 enabled: pulumi.Input[bool]):
        pulumi.set(__self__, "attribute_name", attribute_name)
        pulumi.set(__self__, "enabled", enabled)

    @property
    @pulumi.getter(name="attributeName")
    def attribute_name(self) -> pulumi.Input[str]:
        return pulumi.get(self, "attribute_name")

    @attribute_name.setter
    def attribute_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "attribute_name", value)

    @property
    @pulumi.getter
    def enabled(self) -> pulumi.Input[bool]:
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, value: pulumi.Input[bool]):
        pulumi.set(self, "enabled", value)
| 39.453477
| 156
| 0.709175
| 4,361
| 40,282
| 6.208209
| 0.03967
| 0.110106
| 0.069144
| 0.050528
| 0.828618
| 0.775356
| 0.748541
| 0.70311
| 0.672342
| 0.62536
| 0
| 0.00003
| 0.18013
| 40,282
| 1,020
| 157
| 39.492157
| 0.819748
| 0.003997
| 0
| 0.668317
| 1
| 0
| 0.194516
| 0.121625
| 0
| 0
| 0
| 0
| 0
| 1
| 0.216584
| false
| 0
| 0.006188
| 0.089109
| 0.350248
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
384a9e349e88c305a8a8ae1fc6d200c52d5864b0
| 1,395
|
py
|
Python
|
nautobot_ssot/migrations/0002_performance_metrics.py
|
smk4664/nautobot-plugin-ssot
|
7020d0d1910a09c408885bae9f9324fb91977928
|
[
"Apache-2.0"
] | 9
|
2021-07-28T19:21:22.000Z
|
2022-02-16T10:00:36.000Z
|
nautobot_ssot/migrations/0002_performance_metrics.py
|
smk4664/nautobot-plugin-ssot
|
7020d0d1910a09c408885bae9f9324fb91977928
|
[
"Apache-2.0"
] | 15
|
2021-11-10T07:18:59.000Z
|
2022-03-28T05:39:55.000Z
|
nautobot_ssot/migrations/0002_performance_metrics.py
|
smk4664/nautobot-plugin-ssot
|
7020d0d1910a09c408885bae9f9324fb91977928
|
[
"Apache-2.0"
] | 6
|
2021-09-22T15:38:11.000Z
|
2022-03-15T14:46:14.000Z
|
# Generated by Django 3.1.13 on 2021-12-13 14:47
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add nullable performance-metric columns to the Sync model."""

    dependencies = [
        ("nautobot_ssot", "0001_initial"),
    ]

    # Every new column follows the same shape — nullable and blankable so
    # existing Sync rows migrate without a default — so build the AddField
    # operations from a (name, field class) table.
    operations = [
        migrations.AddField(
            model_name="sync",
            name=column_name,
            field=field_class(blank=True, null=True),
        )
        for column_name, field_class in (
            ("diff_time", models.DurationField),
            ("load_time_source", models.DurationField),
            ("load_time_target", models.DurationField),
            ("memory_peak", models.IntegerField),
            ("memory_size", models.IntegerField),
            ("memory_usage", models.IntegerField),
            ("sync_time", models.DurationField),
        )
    ]
| 28.469388
| 62
| 0.551254
| 135
| 1,395
| 5.562963
| 0.318519
| 0.08522
| 0.214381
| 0.251664
| 0.76032
| 0.76032
| 0.713715
| 0.713715
| 0.648469
| 0.648469
| 0
| 0.02139
| 0.329749
| 1,395
| 48
| 63
| 29.0625
| 0.781818
| 0.032975
| 0
| 0.666667
| 1
| 0
| 0.101708
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.02381
| 0
| 0.095238
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
698eaa6bca50ad74cb6cd234f3e80d030358b056
| 88
|
py
|
Python
|
03_Estrutura_de_Repeticao/06_imprime_vinte_numeros.py
|
gabrieldcpadilha/ListaDeExercicios-PythonBrasil
|
a92d477468bde5eac8987a26ea79af2ffeb6ad81
|
[
"MIT"
] | null | null | null |
03_Estrutura_de_Repeticao/06_imprime_vinte_numeros.py
|
gabrieldcpadilha/ListaDeExercicios-PythonBrasil
|
a92d477468bde5eac8987a26ea79af2ffeb6ad81
|
[
"MIT"
] | 10
|
2020-08-19T04:31:52.000Z
|
2020-09-21T22:48:29.000Z
|
03_Estrutura_de_Repeticao/06_imprime_vinte_numeros.py
|
gabrieldcpadilha/ListaDeExercicios-PythonBrasil
|
a92d477468bde5eac8987a26ea79af2ffeb6ad81
|
[
"MIT"
] | null | null | null |
# Print the integers 1 through 20, first one per line, then again on a
# single line separated by ", " (including a trailing separator, exactly
# as before).
for numero in range(1, 21):
    print(numero)

for numero in range(1, 21):
    print(numero, end=', ')
| 14.666667
| 27
| 0.5
| 18
| 88
| 2.444444
| 0.5
| 0.181818
| 0.272727
| 0.5
| 0.863636
| 0.863636
| 0.863636
| 0
| 0
| 0
| 0
| 0.092308
| 0.261364
| 88
| 5
| 28
| 17.6
| 0.584615
| 0
| 0
| 0.5
| 0
| 0
| 0.056818
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
69bf2c35ee78c064112da892785606d9cec29fd2
| 146
|
py
|
Python
|
panoptes/analysis/axes/all.py
|
oberlin/panoptes
|
67d451ea4ffc58c23b5f347bfa5609fa7f853b45
|
[
"BSD-3-Clause"
] | 2
|
2017-07-24T05:11:59.000Z
|
2017-08-27T19:17:42.000Z
|
panoptes/analysis/axes/all.py
|
oberlin/panoptes
|
67d451ea4ffc58c23b5f347bfa5609fa7f853b45
|
[
"BSD-3-Clause"
] | null | null | null |
panoptes/analysis/axes/all.py
|
oberlin/panoptes
|
67d451ea4ffc58c23b5f347bfa5609fa7f853b45
|
[
"BSD-3-Clause"
] | null | null | null |
from panoptes.analysis.axes.x import applications, days, hours, workstations
from panoptes.analysis.axes.y import app_use, avg_session, sessions
| 36.5
| 76
| 0.828767
| 21
| 146
| 5.666667
| 0.761905
| 0.201681
| 0.336134
| 0.403361
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09589
| 146
| 3
| 77
| 48.666667
| 0.901515
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
69c5e31059156644a6fde78ba8bc910362beb4de
| 41
|
py
|
Python
|
to_classification.py
|
SkirOwen/aie
|
22c64983d53f193e70b045003cc872970ab56804
|
[
"MIT"
] | 2
|
2021-03-24T04:02:32.000Z
|
2021-03-25T16:29:37.000Z
|
to_classification.py
|
SkirOwen/aie
|
22c64983d53f193e70b045003cc872970ab56804
|
[
"MIT"
] | 2
|
2021-03-25T16:00:12.000Z
|
2021-04-01T00:40:26.000Z
|
to_classification.py
|
SkirOwen/aie
|
22c64983d53f193e70b045003cc872970ab56804
|
[
"MIT"
] | null | null | null |
import numpy as np
def n2a():
    """Unimplemented stub — presumably a number-to-something conversion
    (TODO: confirm intent); currently does nothing and returns None."""
    return None
| 6.833333
| 18
| 0.609756
| 7
| 41
| 3.571429
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035714
| 0.317073
| 41
| 5
| 19
| 8.2
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 7
|
385e6bb9993041b8a045764572d82b7438e2b123
| 130
|
py
|
Python
|
src/test/test_todolist.py
|
eu-snehagupta/todolist
|
c476f35930cb58c182b8e771a347883a8623910f
|
[
"MIT"
] | null | null | null |
src/test/test_todolist.py
|
eu-snehagupta/todolist
|
c476f35930cb58c182b8e771a347883a8623910f
|
[
"MIT"
] | 7
|
2021-06-07T14:24:19.000Z
|
2021-06-14T14:05:13.000Z
|
src/test/test_todolist.py
|
eu-snehagupta/todolist
|
c476f35930cb58c182b8e771a347883a8623910f
|
[
"MIT"
] | null | null | null |
import pytest
from src.main import todolist
def get_list_when_empty():
    """Placeholder for a test of ``todolist`` list retrieval when empty.

    NOTE(review): the name lacks the ``test_`` prefix, so pytest will not
    collect this function under default settings — confirm whether it
    should be renamed once implemented.
    """
    # tasklist = todolist.Todolist.get_list()
    pass
| 13
| 45
| 0.730769
| 18
| 130
| 5.055556
| 0.722222
| 0.153846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.192308
| 130
| 9
| 46
| 14.444444
| 0.866667
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 0
| 1
| 0.25
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
387a3d7f19c4aa2fd99e4f3c0db420c386b34b01
| 30,622
|
py
|
Python
|
faceplate_emulator/host/vwradio/tests/test_radios.py
|
mullets/vwradio
|
ac5e95579ac16389e59e8c1a45b41b5e0933cad4
|
[
"BSD-3-Clause"
] | 45
|
2018-06-16T07:52:05.000Z
|
2022-03-08T15:55:02.000Z
|
faceplate_emulator/host/vwradio/tests/test_radios.py
|
matt2005/vwradio
|
ac5e95579ac16389e59e8c1a45b41b5e0933cad4
|
[
"BSD-3-Clause"
] | 1
|
2018-03-28T17:13:53.000Z
|
2019-03-02T19:06:34.000Z
|
faceplate_emulator/host/vwradio/tests/test_radios.py
|
matt2005/vwradio
|
ac5e95579ac16389e59e8c1a45b41b5e0933cad4
|
[
"BSD-3-Clause"
] | 8
|
2018-06-15T09:22:44.000Z
|
2021-09-23T20:16:54.000Z
|
import unittest
from vwradio.radios import Radio
from vwradio.constants import OperationModes, DisplayModes, TunerBands
class TestRadio(unittest.TestCase):
def test_safe_mode(self):
values = (
# Premium 4
(b" 0000 ", 0, 0, OperationModes.SAFE_ENTRY),
(b"1 1234 ", 1234, 1, OperationModes.SAFE_ENTRY),
(b"2 5678 ", 5678, 2, OperationModes.SAFE_ENTRY),
(b"9 9999 ", 9999, 9, OperationModes.SAFE_ENTRY),
(b" NO CODE", 0, 0, OperationModes.SAFE_NO_CODE),
# Premium 5
(b" 0000 ", 0, 0, OperationModes.SAFE_ENTRY),
(b"1 1234 ", 1234, 1, OperationModes.SAFE_ENTRY),
(b"2 5678 ", 5678, 2, OperationModes.SAFE_ENTRY),
(b"9 9999 ", 9999, 9, OperationModes.SAFE_ENTRY),
# Premium 4 and 5
(b" SAFE ", 1000, 0, OperationModes.SAFE_LOCKED),
(b"1 SAFE ", 1000, 1, OperationModes.SAFE_LOCKED),
(b"2 SAFE ", 1000, 2, OperationModes.SAFE_LOCKED),
(b"9 SAFE ", 1000, 9, OperationModes.SAFE_LOCKED),
)
for display, safe_code, safe_tries, mode in values:
radio = Radio()
radio.parse(display)
self.assertEqual(radio.safe_code, safe_code)
self.assertEqual(radio.safe_tries, safe_tries)
self.assertEqual(radio.operation_mode, mode)
self.assertEqual(radio.display_mode,
DisplayModes.SHOWING_OPERATION)
def test_initial(self):
radio = Radio()
# set up known values
radio.operation_mode = OperationModes.TUNER_PLAYING
radio.display_mode = DisplayModes.ADJUSTING_SOUND_VOLUME
# parse display
radio.parse(b" INITIAL")
self.assertEqual(radio.operation_mode,
OperationModes.INITIALIZING)
self.assertEqual(radio.display_mode,
DisplayModes.SHOWING_OPERATION)
def test_monsoon_premium_5(self):
radio = Radio()
# set up known values
radio.operation_mode = OperationModes.TUNER_PLAYING
radio.display_mode = DisplayModes.ADJUSTING_SOUND_VOLUME
# parse display
radio.parse(b" MONSOON")
self.assertEqual(radio.operation_mode,
OperationModes.MONSOON)
self.assertEqual(radio.display_mode,
DisplayModes.SHOWING_OPERATION)
def test_diag(self):
radio = Radio()
# set up known values
radio.operation_mode = OperationModes.TUNER_PLAYING
radio.display_mode = DisplayModes.ADJUSTING_SOUND_VOLUME
# parse display
radio.parse(b" DIAG ")
self.assertEqual(radio.operation_mode,
OperationModes.DIAGNOSTICS)
self.assertEqual(radio.display_mode,
DisplayModes.SHOWING_OPERATION)
def test_sound_volume(self):
displays = (
b"AM MIN ",
b"AM MAX ",
b"FM1 MIN ",
b"FM1 MAX ",
b"FM2 MIN ",
b"FM2 MAX ",
b"CD MIN ",
b"CD MAX ",
b"TAP MIN ",
b"TAP MAX ",
)
for display in displays:
radio = Radio()
# set up known values
radio.operation_mode = OperationModes.TUNER_PLAYING
radio.display_mode = DisplayModes.SHOWING_OPERATION
# parse display
radio.parse(display)
self.assertEqual(radio.operation_mode,
OperationModes.TUNER_PLAYING)
self.assertEqual(radio.display_mode,
DisplayModes.ADJUSTING_SOUND_VOLUME)
def test_sound_balance(self):
values = (
(b"BAL LEFT 9", -9),
(b"BAL LEFT 1", -1),
(b"BAL CENTER ", 0),
(b"BAL RIGHT 1", 1),
(b"BAL RIGHT 9", 9),
)
for display, balance in values:
radio = Radio()
# set up known values
radio.operation_mode = OperationModes.TUNER_PLAYING
radio.display_mode = DisplayModes.SHOWING_OPERATION
# parse display
radio.parse(display)
self.assertEqual(radio.operation_mode,
OperationModes.TUNER_PLAYING)
self.assertEqual(radio.display_mode,
DisplayModes.ADJUSTING_SOUND_BALANCE)
self.assertEqual(radio.sound_balance, balance)
def test_sound_fade(self):
values = (
(b"FADEREAR 9", -9),
(b"FADEREAR 1", -1),
(b"FADECENTER ", 0),
(b"FADEFRONT 1", 1),
(b"FADEFRONT 9", 9),
)
for display, fade in values:
radio = Radio()
# set up known values
radio.operation_mode = OperationModes.TUNER_PLAYING
radio.display_mode = DisplayModes.SHOWING_OPERATION
# parse display
radio.parse(display)
self.assertEqual(radio.operation_mode,
OperationModes.TUNER_PLAYING)
self.assertEqual(radio.display_mode,
DisplayModes.ADJUSTING_SOUND_FADE)
self.assertEqual(radio.sound_fade, fade)
def test_sound_bass(self):
values = (
(b"BASS - 9 ", -9),
(b"BASS - 1 ", -1),
(b"BASS 0 ", 0),
(b"BASS + 1 ", 1),
(b"BASS + 9 ", 9),
)
for display, bass in values:
radio = Radio()
# set up known values
radio.operation_mode = OperationModes.TUNER_PLAYING
radio.display_mode = DisplayModes.SHOWING_OPERATION
# parse display
radio.parse(display)
self.assertEqual(radio.operation_mode,
OperationModes.TUNER_PLAYING)
self.assertEqual(radio.display_mode,
DisplayModes.ADJUSTING_SOUND_BASS)
self.assertEqual(radio.sound_bass, bass)
def test_sound_treble(self):
values = (
(b"TREB - 9 ", -9),
(b"TREB - 1 ", -1),
(b"TREB 0 ", 0),
(b"TREB + 1 ", 1),
(b"TREB + 9 ", 9),
)
for display, treble in values:
radio = Radio()
# set up known values
radio.operation_mode = OperationModes.TUNER_PLAYING
radio.display_mode = DisplayModes.SHOWING_OPERATION
# parse display
radio.parse(display)
self.assertEqual(radio.operation_mode,
OperationModes.TUNER_PLAYING)
self.assertEqual(radio.display_mode,
DisplayModes.ADJUSTING_SOUND_TREBLE)
self.assertEqual(radio.sound_treble, treble)
def test_sound_midrange_premium_5(self):
values = (
(b"MID - 9 ", -9),
(b"MID - 1 ", -1),
(b"MID 0 ", 0),
(b"MID + 1 ", 1),
(b"MID + 9 ", 9),
)
for display, mid in values:
radio = Radio()
# set up known values
radio.operation_mode = OperationModes.TUNER_PLAYING
radio.display_mode = DisplayModes.SHOWING_OPERATION
# parse display
radio.parse(display)
self.assertEqual(radio.sound_midrange, mid)
self.assertEqual(radio.operation_mode,
OperationModes.TUNER_PLAYING)
self.assertEqual(radio.display_mode,
DisplayModes.ADJUSTING_SOUND_MIDRANGE)
def test_set_option_on_vol(self):
    """SET ONVOL displays store the power-on volume and enter SETTING_ON_VOL."""
    cases = [
        (b"SET ONVOL 0", 0),
        (b"SET ONVOL 1", 1),
        (b"SET ONVOL02", 2),
        (b"SET ONVOL13", 13),
        (b"SET ONVOL63", 63),
        (b"SET ONVOL99", 99),
    ]
    for text, expected in cases:
        r = Radio()
        # start from an unrelated known state
        r.operation_mode = OperationModes.TUNER_PLAYING
        r.display_mode = DisplayModes.ADJUSTING_SOUND_VOLUME
        r.parse(text)
        self.assertEqual(r.option_on_vol, expected)
        self.assertEqual(r.operation_mode, OperationModes.SETTING_ON_VOL)
        self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_set_option_cd_mix(self):
    """SET CD MIX displays store the mix setting and enter SETTING_CD_MIX."""
    cases = [
        (b"SET CD MIX1", 1),
        (b"SET CD MIX6", 6),
    ]
    for text, expected in cases:
        r = Radio()
        # start from an unrelated known state
        r.operation_mode = OperationModes.TUNER_PLAYING
        r.display_mode = DisplayModes.ADJUSTING_SOUND_VOLUME
        r.parse(text)
        self.assertEqual(r.option_cd_mix, expected)
        self.assertEqual(r.operation_mode, OperationModes.SETTING_CD_MIX)
        self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_set_option_tape_skip(self):
    """TAPE SKIP Y/N displays store the skip flag and enter SETTING_TAPE_SKIP."""
    cases = [
        (b"TAPE SKIP N", 0),
        (b"TAPE SKIP Y", 1),
    ]
    for text, expected in cases:
        r = Radio()
        # start from an unrelated known state
        r.operation_mode = OperationModes.TUNER_PLAYING
        r.display_mode = DisplayModes.ADJUSTING_SOUND_VOLUME
        r.parse(text)
        self.assertEqual(r.option_tape_skip, expected)
        self.assertEqual(r.operation_mode, OperationModes.SETTING_TAPE_SKIP)
        self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_test_fern(self):
    """FERN ON/OFF test-mode displays set test_fern and enter TESTING_FERN."""
    cases = [
        (b"FERN OFF ", 0),
        (b"FERN ON ", 1),
    ]
    for text, expected in cases:
        r = Radio()
        # start from an unrelated known state
        r.operation_mode = OperationModes.TUNER_PLAYING
        r.display_mode = DisplayModes.ADJUSTING_SOUND_VOLUME
        r.parse(text)
        self.assertEqual(r.test_fern, expected)
        self.assertEqual(r.operation_mode, OperationModes.TESTING_FERN)
        self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_test_rad(self):
    """RAD test-mode displays capture the raw payload and enter TESTING_RAD."""
    cases = [
        (b"RAD 3CP T7 ", b"3CP T7 "), # Premium 4
        (b"RAD DE2 ", b" DE2 "), # Premium 5
        (b"RAD 0123456", b"0123456"),
    ]
    for text, payload in cases:
        r = Radio()
        # start from an unrelated known state
        r.operation_mode = OperationModes.TUNER_PLAYING
        r.display_mode = DisplayModes.ADJUSTING_SOUND_VOLUME
        r.parse(text)
        self.assertEqual(r.test_rad, payload)
        self.assertEqual(r.operation_mode, OperationModes.TESTING_RAD)
        self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_test_ver(self):
    """VER/Vers test-mode displays capture the version bytes and enter TESTING_VER."""
    cases = [
        (b"VER 0702 ", b" 0702 "), # Premium 4
        (b"VersA99CZ23", b"A99CZ23"), # Premium 5
        (b"VER ABCDEFG", b"ABCDEFG"),
    ]
    for text, version in cases:
        r = Radio()
        # start from an unrelated known state
        r.operation_mode = OperationModes.TUNER_PLAYING
        r.display_mode = DisplayModes.ADJUSTING_SOUND_VOLUME
        r.parse(text)
        self.assertEqual(r.test_ver, version)
        self.assertEqual(r.operation_mode, OperationModes.TESTING_VER)
        self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_test_signal_premium5(self):
    """Signal test displays (Premium 5) parse frequency and hex strength."""
    cases = [
        (b" 5300 2 6 F", 530, 0x026F), # AM 530 KHz
        (b"17100 1 2 3", 1710, 0x0123), # AM 1710 KHz
        (b" 8770 5 3 0", 877, 0x0530), # FM 87.7 MHz
        (b"10770 6 4 0", 1077, 0x0640), # FM 107.7 MHz
        (b"1077A B C D", 1077, 0xABCD),
        (b"1077E F 1 2", 1077, 0xEF12),
        (b"10770 0 0 0", 1077, 0x0000),
        (b"1077F F F F", 1077, 0xFFFF),
    ]
    for text, freq, strength in cases:
        r = Radio()
        # start from an unrelated known state
        r.operation_mode = OperationModes.TUNER_PLAYING
        r.display_mode = DisplayModes.ADJUSTING_SOUND_VOLUME
        r.parse(text)
        self.assertEqual((r.test_signal_freq, r.test_signal_strength),
                         (freq, strength))
        self.assertEqual(r.operation_mode, OperationModes.TESTING_SIGNAL)
        self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_cd_playing(self):
    """CD/TR displays parse disc and track numbers and enter CD_PLAYING."""
    cases = [
        (b"CD 1 TR 01 ", 1, 1),
        (b"CD 6 TR 99 ", 6, 99),
    ]
    for text, disc, track in cases:
        r = Radio()
        r.parse(text)
        self.assertEqual((r.cd_disc, r.cd_track), (disc, track))
        self.assertEqual(r.operation_mode, OperationModes.CD_PLAYING)
        self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_cd_cue_rev_pos(self):
    """CUE/REV/CD time displays parse MMSS into seconds; negatives clamp to 0.

    CUE/REV keep the previously-known disc; "CD n" displays update it.
    The current track is never touched by these displays.
    """
    cases = [
        (b"CUE 000 ", OperationModes.CD_CUE, 5, (0*60)+0),
        (b"CUE 001 ", OperationModes.CD_CUE, 5, (0*60)+1),
        (b"CUE 012 ", OperationModes.CD_CUE, 5, (0*60)+12),
        (b"CUE 123 ", OperationModes.CD_CUE, 5, (1*60)+23),
        (b"CUE 1234 ", OperationModes.CD_CUE, 5, (12*60)+34),
        (b"CUE 9999 ", OperationModes.CD_CUE, 5, (99*60)+99),
        (b"CUE -002 ", OperationModes.CD_CUE, 5, 0),
        (b"CUE -1234 ", OperationModes.CD_CUE, 5, 0),
        (b"REV 000 ", OperationModes.CD_REV, 5, (0*60)+0),
        (b"REV 001 ", OperationModes.CD_REV, 5, (0*60)+1),
        (b"REV 012 ", OperationModes.CD_REV, 5, (0*60)+12),
        (b"REV 123 ", OperationModes.CD_REV, 5, (1*60)+23),
        (b"REV 1234 ", OperationModes.CD_REV, 5, (12*60)+34),
        (b"REV 9999 ", OperationModes.CD_REV, 5, (99*60)+99),
        (b"REV -002 ", OperationModes.CD_REV, 5, 0),
        (b"REV -1234 ", OperationModes.CD_REV, 5, 0),
        (b"CD 2 000 ", OperationModes.CD_PLAYING, 2, (0*60)+0),
        (b"CD 2 001 ", OperationModes.CD_PLAYING, 2, (0*60)+1),
        (b"CD 2 012 ", OperationModes.CD_PLAYING, 2, (0*60)+12),
        (b"CD 2 123 ", OperationModes.CD_PLAYING, 2, (1*60)+23),
        (b"CD 2 1234 ", OperationModes.CD_PLAYING, 2, (12*60)+34),
        (b"CD 2 9999 ", OperationModes.CD_PLAYING, 2, (99*60)+99),
        (b"CD 2 -002 ", OperationModes.CD_PLAYING, 2, 0),
        (b"CD 2-1234 ", OperationModes.CD_PLAYING, 2, 0),
    ]
    for text, mode, disc, seconds in cases:
        r = Radio()
        # start from a known CD state
        r.operation_mode = OperationModes.CD_PLAYING
        r.cd_disc = 5
        r.cd_track = 12
        r.parse(text)
        self.assertEqual(r.cd_disc, disc)
        self.assertEqual(r.cd_track, 12)  # track must survive
        self.assertEqual(r.cd_track_pos, seconds)
        self.assertEqual(r.operation_mode, mode)
        self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_cd_scanning(self):
    """SCAN CD displays update disc/track, zero the position, enter CD_SCANNING."""
    cases = [
        (b"SCANCD1TR04", 1, 4),
        (b"SCANCD3TR15", 3, 15),
    ]
    for text, disc, track in cases:
        r = Radio()
        # start from a known CD state
        r.operation_mode = OperationModes.CD_PLAYING
        r.cd_disc = 5
        r.cd_track = 12
        r.cd_track_pos = 42
        r.parse(text)
        self.assertEqual((r.cd_disc, r.cd_track, r.cd_track_pos),
                         (disc, track, 0))
        self.assertEqual(r.operation_mode, OperationModes.CD_SCANNING)
        self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_cd_check_magazine(self):
    """CHK MAGAZIN clears all CD state and enters CD_CHECK_MAGAZINE."""
    r = Radio()
    # start from a known CD state so the clear is observable
    r.operation_mode = OperationModes.CD_PLAYING
    r.cd_disc = 1
    r.cd_track = 3
    r.cd_track_pos = 99
    r.parse(b"CHK MAGAZIN")
    self.assertEqual((r.cd_disc, r.cd_track, r.cd_track_pos), (0, 0, 0))
    self.assertEqual(r.operation_mode, OperationModes.CD_CHECK_MAGAZINE)
    self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_cd_cdx_no_cd(self):
    """"CD n NO CD" keeps the named disc, clears track state, enters CD_CDX_NO_CD."""
    r = Radio()
    # start from a known CD state so the clear is observable
    r.operation_mode = OperationModes.CD_PLAYING
    r.cd_disc = 1
    r.cd_track = 3
    r.cd_track_pos = 99
    r.parse(b"CD 2 NO CD ") # space in "CD 2"
    self.assertEqual((r.cd_disc, r.cd_track, r.cd_track_pos), (2, 0, 0))
    self.assertEqual(r.operation_mode, OperationModes.CD_CDX_NO_CD)
    self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_cd_cdx_cd_err(self):
    """CD ERR displays (both faceplate layouts) enter CD_CDX_CD_ERR for disc 1."""
    for text in (
        b"CD1 CD ERR ", # Premium 4
        b"CD 1CD ERR ", # Premium 5
    ):
        r = Radio()
        # start from a known CD state so the clear is observable
        r.operation_mode = OperationModes.CD_PLAYING
        r.cd_disc = 5
        r.cd_track = 3
        r.cd_track_pos = 99
        r.parse(text)
        self.assertEqual((r.cd_disc, r.cd_track, r.cd_track_pos), (1, 0, 0))
        self.assertEqual(r.operation_mode, OperationModes.CD_CDX_CD_ERR)
        self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_cd_no_disc(self):
    """NO DISC clears all CD state and enters CD_NO_DISC."""
    r = Radio()
    # start from a known CD state so the clear is observable
    r.operation_mode = OperationModes.CD_PLAYING
    r.cd_disc = 5
    r.cd_track = 3
    r.cd_track_pos = 99
    r.parse(b" NO DISC")
    self.assertEqual((r.cd_disc, r.cd_track, r.cd_track_pos), (0, 0, 0))
    self.assertEqual(r.operation_mode, OperationModes.CD_NO_DISC)
    self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_cd_no_changer(self):
    """NO CHANGER clears all CD state and enters CD_NO_CHANGER."""
    r = Radio()
    # start from a known CD state so the clear is observable
    r.operation_mode = OperationModes.CD_PLAYING
    r.cd_disc = 5
    r.cd_track = 3
    r.cd_track_pos = 99
    r.parse(b"NO CHANGER")
    self.assertEqual((r.cd_disc, r.cd_track, r.cd_track_pos), (0, 0, 0))
    self.assertEqual(r.operation_mode, OperationModes.CD_NO_CHANGER)
    self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_cd_no_magazine(self):
    """NO MAGAZIN clears all CD state and enters CD_NO_MAGAZINE."""
    r = Radio()
    # start from a known CD state so the clear is observable
    r.operation_mode = OperationModes.CD_PLAYING
    r.cd_disc = 5
    r.cd_track = 3
    r.cd_track_pos = 99
    r.parse(b"NO MAGAZIN")
    self.assertEqual((r.cd_disc, r.cd_track, r.cd_track_pos), (0, 0, 0))
    self.assertEqual(r.operation_mode, OperationModes.CD_NO_MAGAZINE)
    self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_tape_load_premium_5(self):
    """TAPE LOAD (Premium 5) resets the tape side and enters TAPE_LOAD."""
    r = Radio()
    r.tape_side = 1  # known non-zero side so the reset is observable
    r.parse(b"TAPE LOAD ")
    self.assertEqual(r.tape_side, 0)
    self.assertEqual(r.operation_mode, OperationModes.TAPE_LOAD)
    self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_tape_metal_premium_5(self):
    """TAPE METAL (Premium 5) keeps the current side and enters TAPE_METAL."""
    r = Radio()
    r.tape_side = 1
    r.operation_mode = OperationModes.TAPE_PLAYING
    r.parse(b"TAPE METAL ")
    self.assertEqual(r.tape_side, 1)
    self.assertEqual(r.operation_mode, OperationModes.TAPE_METAL)
    self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_tape_bls(self):
    """TAPE BLS keeps the current side and enters TAPE_BLS."""
    r = Radio()
    r.tape_side = 2
    r.operation_mode = OperationModes.TAPE_PLAYING
    r.parse(b"TAPE BLS ")
    self.assertEqual(r.tape_side, 2)
    self.assertEqual(r.operation_mode, OperationModes.TAPE_BLS)
    self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_tape_play_a(self):
    """TAPE PLAY A switches to side 1 and enters TAPE_PLAYING."""
    r = Radio()
    r.tape_side = 2  # opposite side, so the switch is observable
    r.operation_mode = OperationModes.TUNER_PLAYING
    r.parse(b"TAPE PLAY A")
    self.assertEqual(r.tape_side, 1)
    self.assertEqual(r.operation_mode, OperationModes.TAPE_PLAYING)
    self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_tape_play_b(self):
    """TAPE PLAY B switches to side 2 and enters TAPE_PLAYING."""
    r = Radio()
    r.tape_side = 1  # opposite side, so the switch is observable
    r.operation_mode = OperationModes.TUNER_PLAYING
    r.parse(b"TAPE PLAY B")
    self.assertEqual(r.tape_side, 2)
    self.assertEqual(r.operation_mode, OperationModes.TAPE_PLAYING)
    self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_tape_scan_a(self):
    """TAPE SCAN A switches to side 1 and enters TAPE_SCANNING."""
    r = Radio()
    r.tape_side = 2  # opposite side, so the switch is observable
    r.operation_mode = OperationModes.TUNER_PLAYING
    r.parse(b"TAPE SCAN A")
    self.assertEqual(r.tape_side, 1)
    self.assertEqual(r.operation_mode, OperationModes.TAPE_SCANNING)
    self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_tape_scan_b(self):
    """TAPE SCAN B switches to side 2 and enters TAPE_SCANNING."""
    r = Radio()
    r.tape_side = 1  # opposite side, so the switch is observable
    r.operation_mode = OperationModes.TUNER_PLAYING
    r.parse(b"TAPE SCAN B")
    self.assertEqual(r.tape_side, 2)
    self.assertEqual(r.operation_mode, OperationModes.TAPE_SCANNING)
    self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_tape_ff(self):
    """TAPE FF keeps the current side and enters TAPE_FF."""
    r = Radio()
    r.tape_side = 1
    r.parse(b"TAPE FF ")
    self.assertEqual(r.tape_side, 1)
    self.assertEqual(r.operation_mode, OperationModes.TAPE_FF)
    self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_tape_mss_ff(self):
    """TAPEMSS FF keeps the current side and enters TAPE_MSS_FF."""
    r = Radio()
    r.tape_side = 2
    r.parse(b"TAPEMSS FF ")
    self.assertEqual(r.tape_side, 2)
    self.assertEqual(r.operation_mode, OperationModes.TAPE_MSS_FF)
    self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_tape_rew(self):
    """TAPE REW keeps the current side and enters TAPE_REW."""
    r = Radio()
    r.tape_side = 1
    r.parse(b"TAPE REW ")
    self.assertEqual(r.tape_side, 1)
    self.assertEqual(r.operation_mode, OperationModes.TAPE_REW)
    self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_tape_mss_rew(self):
    """TAPEMSS REW keeps the current side and enters TAPE_MSS_REW."""
    r = Radio()
    r.tape_side = 2
    r.parse(b"TAPEMSS REW")
    self.assertEqual(r.tape_side, 2)
    self.assertEqual(r.operation_mode, OperationModes.TAPE_MSS_REW)
    self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_tape_error(self):
    """TAPE ERROR resets the tape side and enters TAPE_ERROR."""
    r = Radio()
    r.tape_side = 1  # known non-zero side so the reset is observable
    r.parse(b"TAPE ERROR ")
    self.assertEqual(r.tape_side, 0)
    self.assertEqual(r.operation_mode, OperationModes.TAPE_ERROR)
    self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_tape_no_tape(self):
    """NO TAPE resets the tape side and enters TAPE_NO_TAPE."""
    r = Radio()
    r.tape_side = 1  # known non-zero side so the reset is observable
    r.parse(b" NO TAPE")
    self.assertEqual(r.tape_side, 0)
    self.assertEqual(r.operation_mode, OperationModes.TAPE_NO_TAPE)
    self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_tuner_fm_scan_off(self):
    """FM displays (no scan) parse band, frequency, and preset number."""
    cases = [
        (b"FM1 887MHz", 887, TunerBands.FM1, 0),
        (b"FM1 887MHZ", 887, TunerBands.FM1, 0),
        (b"FM1 1023MHZ", 1023, TunerBands.FM1, 0),
        (b"FM11 915MHZ", 915, TunerBands.FM1, 1),
        (b"FM161079MHZ", 1079, TunerBands.FM1, 6),
        (b"FM2 887MHZ", 887, TunerBands.FM2, 0),
        (b"FM2 1023MHZ", 1023, TunerBands.FM2, 0),
        (b"FM21 915MHZ", 915, TunerBands.FM2, 1),
        (b"FM261079MHZ", 1079, TunerBands.FM2, 6),
    ]
    for text, freq, band, preset in cases:
        r = Radio()
        r.parse(text)
        self.assertEqual((r.tuner_band, r.tuner_freq, r.tuner_preset),
                         (band, freq, preset))
        self.assertEqual(r.operation_mode, OperationModes.TUNER_PLAYING)
        self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_tuner_fm_scan_on_fm1_band(self):
    """SCAN on FM1 updates the frequency, clears the preset, keeps the band."""
    r = Radio()
    # known starting state on FM1
    r.tuner_band = TunerBands.FM1
    r.tuner_freq = 915
    r.tuner_preset = 1
    r.parse(b"SCAN 879MHZ")
    self.assertEqual((r.tuner_freq, r.tuner_preset), (879, 0))
    self.assertEqual(r.tuner_band, TunerBands.FM1)
    self.assertEqual(r.operation_mode, OperationModes.TUNER_SCANNING)
    self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_tuner_fm_scan_on_fm2_band(self):
    """SCAN on FM2 updates the frequency, clears the preset, keeps the band."""
    r = Radio()
    # known starting state on FM2
    r.tuner_band = TunerBands.FM2
    r.tuner_freq = 915
    r.tuner_preset = 1
    r.parse(b"SCAN1035MHZ")
    self.assertEqual((r.tuner_freq, r.tuner_preset), (1035, 0))
    self.assertEqual(r.tuner_band, TunerBands.FM2)
    self.assertEqual(r.operation_mode, OperationModes.TUNER_SCANNING)
    self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_tuner_fm_scan_on_unknown_band_sets_fm1(self):
    """An FM SCAN while the band is still UNKNOWN defaults the band to FM1."""
    r = Radio()
    # a fresh Radio has no band yet
    self.assertEqual(r.tuner_band, TunerBands.UNKNOWN)
    r.parse(b"SCAN 879MHZ")
    self.assertEqual((r.tuner_freq, r.tuner_preset), (879, 0))
    self.assertEqual(r.tuner_band, TunerBands.FM1)
    self.assertEqual(r.operation_mode, OperationModes.TUNER_SCANNING)
    self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_tuner_am_scan_off(self):
    """AM displays (no scan) parse frequency and preset and set the AM band."""
    cases = [
        (b"AM 670kHz", 670, 0),
        (b"AM 670KHZ", 670, 0),
        (b"AM 1540KHZ", 1540, 0),
        (b"AM 1 670KHZ", 670, 1),
        (b"AM 61540KHZ", 1540, 6),
    ]
    for text, freq, preset in cases:
        r = Radio()
        r.parse(text)
        self.assertEqual((r.tuner_freq, r.tuner_preset), (freq, preset))
        self.assertEqual(r.tuner_band, TunerBands.AM)
        self.assertEqual(r.operation_mode, OperationModes.TUNER_PLAYING)
        self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_tuner_am_scan_on(self):
    """SCAN on the AM band updates the frequency and clears the preset."""
    cases = [
        (b"SCAN 530kHz", 530),
        (b"SCAN1710KHZ", 1710),
    ]
    for text, freq in cases:
        r = Radio()
        r.tuner_band = TunerBands.AM  # scan interpretation depends on band
        r.parse(text)
        self.assertEqual((r.tuner_freq, r.tuner_preset), (freq, 0))
        self.assertEqual(r.tuner_band, TunerBands.AM)
        self.assertEqual(r.operation_mode, OperationModes.TUNER_SCANNING)
        self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
def test_ignores_blank(self):
    """An all-spaces display leaves both modes untouched."""
    r = Radio()
    # start from a known state
    r.operation_mode = OperationModes.TUNER_PLAYING
    r.display_mode = DisplayModes.SHOWING_OPERATION
    r.parse(b" " * 11)
    self.assertEqual(r.operation_mode, OperationModes.TUNER_PLAYING)
    self.assertEqual(r.display_mode, DisplayModes.SHOWING_OPERATION)
| 38.087065
| 71
| 0.582555
| 3,430
| 30,622
| 5.014869
| 0.063557
| 0.143887
| 0.191849
| 0.139527
| 0.833789
| 0.807221
| 0.773153
| 0.740945
| 0.735015
| 0.730946
| 0
| 0.041449
| 0.326367
| 30,622
| 803
| 72
| 38.134496
| 0.792418
| 0.049605
| 0
| 0.577381
| 0
| 0
| 0.056088
| 0
| 0
| 0
| 0.001655
| 0
| 0.245536
| 1
| 0.068452
| false
| 0
| 0.004464
| 0
| 0.074405
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3882604e53e6064936b2cdae0370ecfe7ea5fcbe
| 21,333
|
py
|
Python
|
util/data/gen/GameAssembly.dll.py
|
56kyle/bloons_auto
|
419d55b51d1cddc49099593970adf1c67985b389
|
[
"MIT"
] | null | null | null |
util/data/gen/GameAssembly.dll.py
|
56kyle/bloons_auto
|
419d55b51d1cddc49099593970adf1c67985b389
|
[
"MIT"
] | null | null | null |
util/data/gen/GameAssembly.dll.py
|
56kyle/bloons_auto
|
419d55b51d1cddc49099593970adf1c67985b389
|
[
"MIT"
] | null | null | null |
symbols = []
exports = [{'type': 'function', 'name': 'CloseZStream', 'address': '0x7ffaa6a3f0f0'}, {'type': 'function', 'name': 'CreateZStream', 'address': '0x7ffaa6a3f1a0'}, {'type': 'function', 'name': 'DllCanUnloadNow', 'address': '0x7ffaa6a1f2d0'}, {'type': 'function', 'name': 'DllGetActivationFactory', 'address': '0x7ffaa6a1f2e0'}, {'type': 'function', 'name': 'Flush', 'address': '0x7ffaa6a3f2c0'}, {'type': 'function', 'name': 'ReadZStream', 'address': '0x7ffaa6a3f300'}, {'type': 'function', 'name': 'UnityPalGetLocalTimeZoneData', 'address': '0x7ffaa69d50c0'}, {'type': 'function', 'name': 'UnityPalGetTimeZoneDataForID', 'address': '0x7ffaa69d50d0'}, {'type': 'function', 'name': 'UnityPalTimeZoneInfoGetTimeZoneIDs', 'address': '0x7ffaa69b1030'}, {'type': 'function', 'name': 'UseUnityPalForTimeZoneInformation', 'address': '0x7ffaa69b1030'}, {'type': 'function', 'name': 'WriteZStream', 'address': '0x7ffaa6a3f3c0'}, {'type': 'function', 'name': 'il2cpp_add_internal_call', 'address': '0x7ffaa69c8180'}, {'type': 'function', 'name': 'il2cpp_alloc', 'address': '0x7ffaa69c8190'}, {'type': 'function', 'name': 'il2cpp_allocation_granularity', 'address': '0x7ffaa69605a0'}, {'type': 'function', 'name': 'il2cpp_array_class_get', 'address': '0x7ffaa69c81a0'}, {'type': 'function', 'name': 'il2cpp_array_element_size', 'address': '0x7ffaa69c81b0'}, {'type': 'function', 'name': 'il2cpp_array_get_byte_length', 'address': '0x7ffaa69c81c0'}, {'type': 'function', 'name': 'il2cpp_array_length', 'address': '0x7ffaa69c81d0'}, {'type': 'function', 'name': 'il2cpp_array_new', 'address': '0x7ffaa69c81e0'}, {'type': 'function', 'name': 'il2cpp_array_new_full', 'address': '0x7ffaa69c81f0'}, {'type': 'function', 'name': 'il2cpp_array_new_specific', 'address': '0x7ffaa69c8200'}, {'type': 'function', 'name': 'il2cpp_array_object_header_size', 'address': '0x7ffaa69c8210'}, {'type': 'function', 'name': 'il2cpp_assembly_get_image', 'address': '0x7ffaa69c8220'}, {'type': 'function', 'name': 
'il2cpp_bounded_array_class_get', 'address': '0x7ffaa69c8230'}, {'type': 'function', 'name': 'il2cpp_capture_memory_snapshot', 'address': '0x7ffaa69c8240'}, {'type': 'function', 'name': 'il2cpp_class_array_element_size', 'address': '0x7ffaa69c8250'}, {'type': 'function', 'name': 'il2cpp_class_enum_basetype', 'address': '0x7ffaa69c8260'}, {'type': 'function', 'name': 'il2cpp_class_for_each', 'address': '0x7ffaa69c8270'}, {'type': 'function', 'name': 'il2cpp_class_from_il2cpp_type', 'address': '0x7ffaa69c8280'}, {'type': 'function', 'name': 'il2cpp_class_from_name', 'address': '0x7ffaa69c8290'}, {'type': 'function', 'name': 'il2cpp_class_from_system_type', 'address': '0x7ffaa69c82a0'}, {'type': 'function', 'name': 'il2cpp_class_from_type', 'address': '0x7ffaa69c8280'}, {'type': 'function', 'name': 'il2cpp_class_get_assemblyname', 'address': '0x7ffaa69c82b0'}, {'type': 'function', 'name': 'il2cpp_class_get_bitmap', 'address': '0x7ffaa69c82c0'}, {'type': 'function', 'name': 'il2cpp_class_get_bitmap_size', 'address': '0x7ffaa69c82e0'}, {'type': 'function', 'name': 'il2cpp_class_get_data_size', 'address': '0x7ffaa69c82f0'}, {'type': 'function', 'name': 'il2cpp_class_get_declaring_type', 'address': '0x7ffaa69c8300'}, {'type': 'function', 'name': 'il2cpp_class_get_element_class', 'address': '0x7ffaa69c8310'}, {'type': 'function', 'name': 'il2cpp_class_get_events', 'address': '0x7ffaa69c8320'}, {'type': 'function', 'name': 'il2cpp_class_get_field_from_name', 'address': '0x7ffaa69c8330'}, {'type': 'function', 'name': 'il2cpp_class_get_fields', 'address': '0x7ffaa69c8340'}, {'type': 'function', 'name': 'il2cpp_class_get_flags', 'address': '0x7ffaa69c8350'}, {'type': 'function', 'name': 'il2cpp_class_get_image', 'address': '0x7ffaa69c8220'}, {'type': 'function', 'name': 'il2cpp_class_get_interfaces', 'address': '0x7ffaa69c8360'}, {'type': 'function', 'name': 'il2cpp_class_get_method_from_name', 'address': '0x7ffaa69c8370'}, {'type': 'function', 'name': 
'il2cpp_class_get_methods', 'address': '0x7ffaa69c8380'}, {'type': 'function', 'name': 'il2cpp_class_get_name', 'address': '0x7ffaa69c8390'}, {'type': 'function', 'name': 'il2cpp_class_get_namespace', 'address': '0x7ffaa69c83a0'}, {'type': 'function', 'name': 'il2cpp_class_get_nested_types', 'address': '0x7ffaa69c83b0'}, {'type': 'function', 'name': 'il2cpp_class_get_parent', 'address': '0x7ffaa69c83c0'}, {'type': 'function', 'name': 'il2cpp_class_get_properties', 'address': '0x7ffaa69c83d0'}, {'type': 'function', 'name': 'il2cpp_class_get_property_from_name', 'address': '0x7ffaa69c83e0'}, {'type': 'function', 'name': 'il2cpp_class_get_rank', 'address': '0x7ffaa69c83f0'}, {'type': 'function', 'name': 'il2cpp_class_get_static_field_data', 'address': '0x7ffaa68f3de0'}, {'type': 'function', 'name': 'il2cpp_class_get_type', 'address': '0x7ffaa69c8400'}, {'type': 'function', 'name': 'il2cpp_class_get_type_token', 'address': '0x7ffaa69c8410'}, {'type': 'function', 'name': 'il2cpp_class_get_userdata_offset', 'address': '0x7ffaa69c8420'}, {'type': 'function', 'name': 'il2cpp_class_has_attribute', 'address': '0x7ffaa69c8430'}, {'type': 'function', 'name': 'il2cpp_class_has_parent', 'address': '0x7ffaa69c8440'}, {'type': 'function', 'name': 'il2cpp_class_has_references', 'address': '0x7ffaa69c8450'}, {'type': 'function', 'name': 'il2cpp_class_instance_size', 'address': '0x7ffaa69c8460'}, {'type': 'function', 'name': 'il2cpp_class_is_abstract', 'address': '0x7ffaa69c8470'}, {'type': 'function', 'name': 'il2cpp_class_is_assignable_from', 'address': '0x7ffaa69c8480'}, {'type': 'function', 'name': 'il2cpp_class_is_blittable', 'address': '0x7ffaa69c8490'}, {'type': 'function', 'name': 'il2cpp_class_is_enum', 'address': '0x7ffaa69c84a0'}, {'type': 'function', 'name': 'il2cpp_class_is_generic', 'address': '0x7ffaa69c84b0'}, {'type': 'function', 'name': 'il2cpp_class_is_inflated', 'address': '0x7ffaa69c84c0'}, {'type': 'function', 'name': 'il2cpp_class_is_interface', 'address': 
'0x7ffaa69c84d0'}, {'type': 'function', 'name': 'il2cpp_class_is_subclass_of', 'address': '0x7ffaa69c84e0'}, {'type': 'function', 'name': 'il2cpp_class_is_valuetype', 'address': '0x7ffaa69c84f0'}, {'type': 'function', 'name': 'il2cpp_class_num_fields', 'address': '0x7ffaa69c8500'}, {'type': 'function', 'name': 'il2cpp_class_set_userdata', 'address': '0x7ffaa69c8510'}, {'type': 'function', 'name': 'il2cpp_class_value_size', 'address': '0x7ffaa69c8520'}, {'type': 'function', 'name': 'il2cpp_current_thread_get_frame_at', 'address': '0x7ffaa69c8530'}, {'type': 'function', 'name': 'il2cpp_current_thread_get_stack_depth', 'address': '0x7ffaa69c8540'}, {'type': 'function', 'name': 'il2cpp_current_thread_get_top_frame', 'address': '0x7ffaa69c8560'}, {'type': 'function', 'name': 'il2cpp_current_thread_walk_frame_stack', 'address': '0x7ffaa69c8570'}, {'type': 'function', 'name': 'il2cpp_custom_attrs_construct', 'address': '0x7ffaa69c8580'}, {'type': 'function', 'name': 'il2cpp_custom_attrs_free', 'address': '0x7ffaa68f49c0'}, {'type': 'function', 'name': 'il2cpp_custom_attrs_from_class', 'address': '0x7ffaa69c8590'}, {'type': 'function', 'name': 'il2cpp_custom_attrs_from_method', 'address': '0x7ffaa69c85b0'}, {'type': 'function', 'name': 'il2cpp_custom_attrs_get_attr', 'address': '0x7ffaa69c85d0'}, {'type': 'function', 'name': 'il2cpp_custom_attrs_has_attr', 'address': '0x7ffaa69c85e0'}, {'type': 'function', 'name': 'il2cpp_debug_get_method_info', 'address': '0x7ffaa69c85f0'}, {'type': 'function', 'name': 'il2cpp_debugger_set_agent_options', 'address': '0x7ffaa68f49c0'}, {'type': 'function', 'name': 'il2cpp_domain_assembly_open', 'address': '0x7ffaa69c8600'}, {'type': 'function', 'name': 'il2cpp_domain_get', 'address': '0x7ffaa69c8610'}, {'type': 'function', 'name': 'il2cpp_domain_get_assemblies', 'address': '0x7ffaa69c8620'}, {'type': 'function', 'name': 'il2cpp_exception_from_name_msg', 'address': '0x7ffaa69c8650'}, {'type': 'function', 'name': 'il2cpp_field_get_flags', 
'address': '0x7ffaa69c8660'}, {'type': 'function', 'name': 'il2cpp_field_get_name', 'address': '0x7ffaa69c8220'}, {'type': 'function', 'name': 'il2cpp_field_get_offset', 'address': '0x7ffaa69c8670'}, {'type': 'function', 'name': 'il2cpp_field_get_parent', 'address': '0x7ffaa69c8390'}, {'type': 'function', 'name': 'il2cpp_field_get_type', 'address': '0x7ffaa69c8680'}, {'type': 'function', 'name': 'il2cpp_field_get_value', 'address': '0x7ffaa69c8690'}, {'type': 'function', 'name': 'il2cpp_field_get_value_object', 'address': '0x7ffaa69c86a0'}, {'type': 'function', 'name': 'il2cpp_field_has_attribute', 'address': '0x7ffaa69c86b0'}, {'type': 'function', 'name': 'il2cpp_field_is_literal', 'address': '0x7ffaa69c86c0'}, {'type': 'function', 'name': 'il2cpp_field_set_value', 'address': '0x7ffaa69c86d0'}, {'type': 'function', 'name': 'il2cpp_field_set_value_object', 'address': '0x7ffaa69c86e0'}, {'type': 'function', 'name': 'il2cpp_field_static_get_value', 'address': '0x7ffaa69c86f0'}, {'type': 'function', 'name': 'il2cpp_field_static_set_value', 'address': '0x7ffaa69c8700'}, {'type': 'function', 'name': 'il2cpp_format_exception', 'address': '0x7ffaa69c8710'}, {'type': 'function', 'name': 'il2cpp_format_stack_trace', 'address': '0x7ffaa69c87a0'}, {'type': 'function', 'name': 'il2cpp_free', 'address': '0x7ffaa69c8830'}, {'type': 'function', 'name': 'il2cpp_free_captured_memory_snapshot', 'address': '0x7ffaa69c8840'}, {'type': 'function', 'name': 'il2cpp_gc_collect', 'address': '0x7ffaa69c8850'}, {'type': 'function', 'name': 'il2cpp_gc_collect_a_little', 'address': '0x7ffaa69c8860'}, {'type': 'function', 'name': 'il2cpp_gc_disable', 'address': '0x7ffaa69c8870'}, {'type': 'function', 'name': 'il2cpp_gc_enable', 'address': '0x7ffaa69c8880'}, {'type': 'function', 'name': 'il2cpp_gc_foreach_heap', 'address': '0x7ffaa69c8890'}, {'type': 'function', 'name': 'il2cpp_gc_get_heap_size', 'address': '0x7ffaa69c88c0'}, {'type': 'function', 'name': 'il2cpp_gc_get_max_time_slice_ns', 
'address': '0x7ffaa69c88d0'}, {'type': 'function', 'name': 'il2cpp_gc_get_used_size', 'address': '0x7ffaa69c88e0'}, {'type': 'function', 'name': 'il2cpp_gc_has_strict_wbarriers', 'address': '0x7ffaa6958400'}, {'type': 'function', 'name': 'il2cpp_gc_is_disabled', 'address': '0x7ffaa69c88f0'}, {'type': 'function', 'name': 'il2cpp_gc_is_incremental', 'address': '0x7ffaa69c8900'}, {'type': 'function', 'name': 'il2cpp_gc_set_external_allocation_tracker', 'address': '0x7ffaa68f49c0'}, {'type': 'function', 'name': 'il2cpp_gc_set_external_wbarrier_tracker', 'address': '0x7ffaa68f49c0'}, {'type': 'function', 'name': 'il2cpp_gc_set_max_time_slice_ns', 'address': '0x7ffaa69c8910'}, {'type': 'function', 'name': 'il2cpp_gc_wbarrier_set_field', 'address': '0x7ffaa69c8920'}, {'type': 'function', 'name': 'il2cpp_gchandle_foreach_get_target', 'address': '0x7ffaa69c8930'}, {'type': 'function', 'name': 'il2cpp_gchandle_free', 'address': '0x7ffaa69c8960'}, {'type': 'function', 'name': 'il2cpp_gchandle_get_target', 'address': '0x7ffaa69c8970'}, {'type': 'function', 'name': 'il2cpp_gchandle_new', 'address': '0x7ffaa69c8980'}, {'type': 'function', 'name': 'il2cpp_gchandle_new_weakref', 'address': '0x7ffaa69c8990'}, {'type': 'function', 'name': 'il2cpp_get_corlib', 'address': '0x7ffaa69c89a0'}, {'type': 'function', 'name': 'il2cpp_get_exception_argument_null', 'address': '0x7ffaa69c89b0'}, {'type': 'function', 'name': 'il2cpp_image_get_assembly', 'address': '0x7ffaa69c8390'}, {'type': 'function', 'name': 'il2cpp_image_get_class', 'address': '0x7ffaa69c89c0'}, {'type': 'function', 'name': 'il2cpp_image_get_class_count', 'address': '0x7ffaa69c89d0'}, {'type': 'function', 'name': 'il2cpp_image_get_entry_point', 'address': '0x7ffaa69c89e0'}, {'type': 'function', 'name': 'il2cpp_image_get_filename', 'address': '0x7ffaa69c8220'}, {'type': 'function', 'name': 'il2cpp_image_get_name', 'address': '0x7ffaa69c8220'}, {'type': 'function', 'name': 'il2cpp_init', 'address': '0x7ffaa69c89f0'}, {'type': 
'function', 'name': 'il2cpp_init_utf16', 'address': '0x7ffaa69c8a20'}, {'type': 'function', 'name': 'il2cpp_is_debugger_attached', 'address': '0x7ffaa69c8ab0'}, {'type': 'function', 'name': 'il2cpp_is_vm_thread', 'address': '0x7ffaa69c8ac0'}, {'type': 'function', 'name': 'il2cpp_method_get_class', 'address': '0x7ffaa69c83a0'}, {'type': 'function', 'name': 'il2cpp_method_get_declaring_type', 'address': '0x7ffaa69c83a0'}, {'type': 'function', 'name': 'il2cpp_method_get_flags', 'address': '0x7ffaa69c8ad0'}, {'type': 'function', 'name': 'il2cpp_method_get_from_reflection', 'address': '0x7ffaa69c8390'}, {'type': 'function', 'name': 'il2cpp_method_get_name', 'address': '0x7ffaa69c8390'}, {'type': 'function', 'name': 'il2cpp_method_get_object', 'address': '0x7ffaa69c8b00'}, {'type': 'function', 'name': 'il2cpp_method_get_param', 'address': '0x7ffaa69c8b10'}, {'type': 'function', 'name': 'il2cpp_method_get_param_count', 'address': '0x7ffaa69c8b20'}, {'type': 'function', 'name': 'il2cpp_method_get_param_name', 'address': '0x7ffaa69c8b30'}, {'type': 'function', 'name': 'il2cpp_method_get_return_type', 'address': '0x7ffaa69c8b40'}, {'type': 'function', 'name': 'il2cpp_method_get_token', 'address': '0x7ffaa69c8b50'}, {'type': 'function', 'name': 'il2cpp_method_has_attribute', 'address': '0x7ffaa69c8b60'}, {'type': 'function', 'name': 'il2cpp_method_is_generic', 'address': '0x7ffaa69c8b70'}, {'type': 'function', 'name': 'il2cpp_method_is_inflated', 'address': '0x7ffaa69c8b80'}, {'type': 'function', 'name': 'il2cpp_method_is_instance', 'address': '0x7ffaa69c8b90'}, {'type': 'function', 'name': 'il2cpp_monitor_enter', 'address': '0x7ffaa69c8ba0'}, {'type': 'function', 'name': 'il2cpp_monitor_exit', 'address': '0x7ffaa69c8bb0'}, {'type': 'function', 'name': 'il2cpp_monitor_pulse', 'address': '0x7ffaa69c8bc0'}, {'type': 'function', 'name': 'il2cpp_monitor_pulse_all', 'address': '0x7ffaa69c8bd0'}, {'type': 'function', 'name': 'il2cpp_monitor_try_enter', 'address': '0x7ffaa69c8be0'}, 
{'type': 'function', 'name': 'il2cpp_monitor_try_wait', 'address': '0x7ffaa69c8bf0'}, {'type': 'function', 'name': 'il2cpp_monitor_wait', 'address': '0x7ffaa69c8c00'}, {'type': 'function', 'name': 'il2cpp_object_get_class', 'address': '0x7ffaa69c8220'}, {'type': 'function', 'name': 'il2cpp_object_get_size', 'address': '0x7ffaa69c8c10'}, {'type': 'function', 'name': 'il2cpp_object_get_virtual_method', 'address': '0x7ffaa69c8c20'}, {'type': 'function', 'name': 'il2cpp_object_header_size', 'address': '0x7ffaa69605a0'}, {'type': 'function', 'name': 'il2cpp_object_new', 'address': '0x7ffaa69c8c30'}, {'type': 'function', 'name': 'il2cpp_object_unbox', 'address': '0x7ffaa69c8c50'}, {'type': 'function', 'name': 'il2cpp_offset_of_array_bounds_in_array_object_header', 'address': '0x7ffaa69605a0'}, {'type': 'function', 'name': 'il2cpp_offset_of_array_length_in_array_object_header', 'address': '0x7ffaa69c8c60'}, {'type': 'function', 'name': 'il2cpp_override_stack_backtrace', 'address': '0x7ffaa69c8c70'}, {'type': 'function', 'name': 'il2cpp_profiler_install', 'address': '0x7ffaa69c8c80'}, {'type': 'function', 'name': 'il2cpp_profiler_install_allocation', 'address': '0x7ffaa69c8c90'}, {'type': 'function', 'name': 'il2cpp_profiler_install_enter_leave', 'address': '0x7ffaa69c8ca0'}, {'type': 'function', 'name': 'il2cpp_profiler_install_fileio', 'address': '0x7ffaa69c8cb0'}, {'type': 'function', 'name': 'il2cpp_profiler_install_gc', 'address': '0x7ffaa69c8cc0'}, {'type': 'function', 'name': 'il2cpp_profiler_install_thread', 'address': '0x7ffaa69c8cd0'}, {'type': 'function', 'name': 'il2cpp_profiler_set_events', 'address': '0x7ffaa69c8ce0'}, {'type': 'function', 'name': 'il2cpp_property_get_flags', 'address': '0x7ffaa69c8cf0'}, {'type': 'function', 'name': 'il2cpp_property_get_get_method', 'address': '0x7ffaa69c8390'}, {'type': 'function', 'name': 'il2cpp_property_get_name', 'address': '0x7ffaa69c8680'}, {'type': 'function', 'name': 'il2cpp_property_get_parent', 'address': 
'0x7ffaa69c8220'}, {'type': 'function', 'name': 'il2cpp_property_get_set_method', 'address': '0x7ffaa69c83a0'}, {'type': 'function', 'name': 'il2cpp_raise_exception', 'address': '0x7ffaa69c8d00'}, {'type': 'function', 'name': 'il2cpp_register_debugger_agent_transport', 'address': '0x7ffaa68f49c0'}, {'type': 'function', 'name': 'il2cpp_register_log_callback', 'address': '0x7ffaa69c8d10'}, {'type': 'function', 'name': 'il2cpp_resolve_icall', 'address': '0x7ffaa69c8d20'}, {'type': 'function', 'name': 'il2cpp_runtime_class_init', 'address': '0x7ffaa69c8d30'}, {'type': 'function', 'name': 'il2cpp_runtime_invoke', 'address': '0x7ffaa69c8d40'}, {'type': 'function', 'name': 'il2cpp_runtime_invoke_convert_args', 'address': '0x7ffaa69c8d60'}, {'type': 'function', 'name': 'il2cpp_runtime_object_init', 'address': '0x7ffaa69c8d80'}, {'type': 'function', 'name': 'il2cpp_runtime_object_init_exception', 'address': '0x7ffaa69c8d90'}, {'type': 'function', 'name': 'il2cpp_runtime_unhandled_exception_policy_set', 'address': '0x7ffaa69c8da0'}, {'type': 'function', 'name': 'il2cpp_set_commandline_arguments', 'address': '0x7ffaa69c8db0'}, {'type': 'function', 'name': 'il2cpp_set_commandline_arguments_utf16', 'address': '0x7ffaa69c8dc0'}, {'type': 'function', 'name': 'il2cpp_set_config', 'address': '0x7ffaa69c8dd0'}, {'type': 'function', 'name': 'il2cpp_set_config_dir', 'address': '0x7ffaa69c8de0'}, {'type': 'function', 'name': 'il2cpp_set_config_utf16', 'address': '0x7ffaa69c8df0'}, {'type': 'function', 'name': 'il2cpp_set_data_dir', 'address': '0x7ffaa69c8e00'}, {'type': 'function', 'name': 'il2cpp_set_default_thread_affinity', 'address': '0x7ffaa69c8e10'}, {'type': 'function', 'name': 'il2cpp_set_find_plugin_callback', 'address': '0x7ffaa69c8e20'}, {'type': 'function', 'name': 'il2cpp_set_memory_callbacks', 'address': '0x7ffaa69c8e30'}, {'type': 'function', 'name': 'il2cpp_set_temp_dir', 'address': '0x7ffaa69c8e40'}, {'type': 'function', 'name': 'il2cpp_shutdown', 'address': 
'0x7ffaa69c8e50'}, {'type': 'function', 'name': 'il2cpp_start_gc_world', 'address': '0x7ffaa69c8e60'}, {'type': 'function', 'name': 'il2cpp_stats_dump_to_file', 'address': '0x7ffaa69c8e70'}, {'type': 'function', 'name': 'il2cpp_stats_get_value', 'address': '0x7ffaa69c90e0'}, {'type': 'function', 'name': 'il2cpp_stop_gc_world', 'address': '0x7ffaa69c9160'}, {'type': 'function', 'name': 'il2cpp_string_chars', 'address': '0x7ffaa69c9170'}, {'type': 'function', 'name': 'il2cpp_string_intern', 'address': '0x7ffaa69c9180'}, {'type': 'function', 'name': 'il2cpp_string_is_interned', 'address': '0x7ffaa69c9190'}, {'type': 'function', 'name': 'il2cpp_string_length', 'address': '0x7ffaa69c91a0'}, {'type': 'function', 'name': 'il2cpp_string_new', 'address': '0x7ffaa69c91b0'}, {'type': 'function', 'name': 'il2cpp_string_new_len', 'address': '0x7ffaa69c91c0'}, {'type': 'function', 'name': 'il2cpp_string_new_utf16', 'address': '0x7ffaa69c91d0'}, {'type': 'function', 'name': 'il2cpp_string_new_wrapper', 'address': '0x7ffaa69c91b0'}, {'type': 'function', 'name': 'il2cpp_thread_attach', 'address': '0x7ffaa69c91e0'}, {'type': 'function', 'name': 'il2cpp_thread_current', 'address': '0x7ffaa69c91f0'}, {'type': 'function', 'name': 'il2cpp_thread_detach', 'address': '0x7ffaa69c9200'}, {'type': 'function', 'name': 'il2cpp_thread_get_all_attached_threads', 'address': '0x7ffaa69c9210'}, {'type': 'function', 'name': 'il2cpp_thread_get_frame_at', 'address': '0x7ffaa69c9220'}, {'type': 'function', 'name': 'il2cpp_thread_get_stack_depth', 'address': '0x7ffaa69c9230'}, {'type': 'function', 'name': 'il2cpp_thread_get_top_frame', 'address': '0x7ffaa69c9240'}, {'type': 'function', 'name': 'il2cpp_thread_walk_frame_stack', 'address': '0x7ffaa69c9250'}, {'type': 'function', 'name': 'il2cpp_type_equals', 'address': '0x7ffaa69c9260'}, {'type': 'function', 'name': 'il2cpp_type_get_assembly_qualified_name', 'address': '0x7ffaa69c9270'}, {'type': 'function', 'name': 'il2cpp_type_get_attrs', 'address': 
'0x7ffaa69c9310'}, {'type': 'function', 'name': 'il2cpp_type_get_class_or_element_class', 'address': '0x7ffaa69c9320'}, {'type': 'function', 'name': 'il2cpp_type_get_name', 'address': '0x7ffaa69c9330'}, {'type': 'function', 'name': 'il2cpp_type_get_name_chunked', 'address': '0x7ffaa69c93d0'}, {'type': 'function', 'name': 'il2cpp_type_get_object', 'address': '0x7ffaa69c93e0'}, {'type': 'function', 'name': 'il2cpp_type_get_type', 'address': '0x7ffaa69c93f0'}, {'type': 'function', 'name': 'il2cpp_type_is_byref', 'address': '0x7ffaa69c9400'}, {'type': 'function', 'name': 'il2cpp_type_is_pointer_type', 'address': '0x7ffaa69c9410'}, {'type': 'function', 'name': 'il2cpp_type_is_static', 'address': '0x7ffaa69c9420'}, {'type': 'function', 'name': 'il2cpp_unhandled_exception', 'address': '0x7ffaa69c9430'}, {'type': 'function', 'name': 'il2cpp_unity_install_unitytls_interface', 'address': '0x7ffaa69c9440'}, {'type': 'function', 'name': 'il2cpp_unity_liveness_calculation_begin', 'address': '0x7ffaa69c9450'}, {'type': 'function', 'name': 'il2cpp_unity_liveness_calculation_end', 'address': '0x7ffaa69c9460'}, {'type': 'function', 'name': 'il2cpp_unity_liveness_calculation_from_root', 'address': '0x7ffaa69c9470'}, {'type': 'function', 'name': 'il2cpp_unity_liveness_calculation_from_statics', 'address': '0x7ffaa69c9480'}, {'type': 'function', 'name': 'il2cpp_value_box', 'address': '0x7ffaa69c9490'}]
| 10,666.5
| 21,320
| 0.707074
| 2,198
| 21,333
| 6.518653
| 0.199727
| 0.201005
| 0.268007
| 0.351619
| 0.528197
| 0.399777
| 0.130025
| 0.054299
| 0.007677
| 0
| 0
| 0.100608
| 0.067689
| 21,333
| 2
| 21,320
| 10,666.5
| 0.61979
| 0
| 0
| 0
| 0
| 0
| 0.706384
| 0.254992
| 0
| 0
| 0.157495
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
38b9b3560a1c59e5b6130170892ca5f275d8f483
| 29,150
|
py
|
Python
|
seqgra/learner/tensorflow/keraslearner.py
|
gifford-lab/seqgra
|
3c7547878ecda4c00572746b8a07e0d614c9dbef
|
[
"MIT"
] | null | null | null |
seqgra/learner/tensorflow/keraslearner.py
|
gifford-lab/seqgra
|
3c7547878ecda4c00572746b8a07e0d614c9dbef
|
[
"MIT"
] | null | null | null |
seqgra/learner/tensorflow/keraslearner.py
|
gifford-lab/seqgra
|
3c7547878ecda4c00572746b8a07e0d614c9dbef
|
[
"MIT"
] | 2
|
2021-06-14T20:27:40.000Z
|
2021-06-14T20:29:29.000Z
|
"""MIT - CSAIL - Gifford Lab - seqgra
TensorFlow Keras learners
@author: Konstantin Krismer
"""
from distutils.util import strtobool
from typing import Any, List, Optional
import numpy as np
import tensorflow as tf
from seqgra import ModelSize
from seqgra.learner import DNAMultiClassClassificationLearner
from seqgra.learner import DNAMultiLabelClassificationLearner
from seqgra.learner import ProteinMultiClassClassificationLearner
from seqgra.learner import ProteinMultiLabelClassificationLearner
from seqgra.learner.tensorflow import KerasHelper
from seqgra.model import ModelDefinition
class KerasDNAMultiClassClassificationLearner(
        DNAMultiClassClassificationLearner):
    """Keras/TensorFlow learner for multi-class classification of DNA sequences.

    Model construction, training, evaluation, prediction, and persistence are
    delegated to ``KerasHelper``; this class selects the compute device and
    builds ``tf.data`` input pipelines from in-memory examples or data files.
    """
    def __init__(self, model_definition: ModelDefinition, data_dir: str,
                 output_dir: str, validate_data: bool = True,
                 gpu_id: int = 0, silent: bool = False) -> None:
        """Initialize the learner and select GPU or CPU execution.

        :param model_definition: seqgra model definition
        :param data_dir: directory containing the data set files
        :param output_dir: directory for learner output
        :param validate_data: whether data files are validated on load
        :param gpu_id: index of the GPU to use; ``-1`` forces CPU
        :param silent: suppress progress output
        """
        super().__init__(model_definition, data_dir, output_dir,
                         validate_data, gpu_id, silent=silent)
        KerasHelper.init_tf_memory_policy()
        gpus = tf.config.list_physical_devices("GPU")
        # use CUDA only if TF was built with GPU support, at least one GPU is
        # visible, and the caller did not request CPU-only mode (gpu_id == -1)
        self.use_cuda: bool = tf.test.is_built_with_gpu_support() and \
            len(gpus) > 0 and gpu_id != -1
        if self.use_cuda:
            tf.config.set_visible_devices(gpus[gpu_id], "GPU")
            self.device_label: str = "/GPU:" + str(gpu_id)
        else:
            self.device_label: str = "/CPU:0"
        self._check_task_loss_compatibility()
    def _check_task_loss_compatibility(self) -> None:
        """Warn if the configured loss is not a known multi-class loss."""
        if "loss" in self.definition.loss_hyperparameters:
            loss: str = self.definition.loss_hyperparameters["loss"]
            # normalize, e.g. "Categorical_Crossentropy" -> "categoricalcrossentropy"
            loss = loss.lower().replace("_", "").strip()
            if not loss in KerasHelper.MULTI_CLASS_CLASSIFICATION_LOSSES:
                self.logger.warning("loss function '%s' is incompatible with "
                                    "multi-class classification models", loss)
    def _get_output_layer_activation_function(self) -> Optional[str]:
        """Derive the output layer activation from the loss hyperparameters.

        Returns an activation name only when both ``from_logits`` and ``loss``
        are configured and ``from_logits`` parses as true; otherwise ``None``.
        (How the returned name and the from_logits flag interact is handled by
        KerasHelper — NOTE(review): confirm there.)
        """
        if "from_logits" in self.definition.loss_hyperparameters and \
                "loss" in self.definition.loss_hyperparameters:
            # hyperparameter values are strings; strtobool handles "true"/"1"/...
            from_logits: bool = bool(strtobool(
                self.definition.loss_hyperparameters["from_logits"]))
            if from_logits:
                loss: str = self.definition.loss_hyperparameters["loss"]
                loss = loss.lower().replace("_", "").strip()
                if loss == "categoricalcrossentropy" or \
                        loss == "sparsecategoricalcrossentropy":
                    return "softmax"
                elif loss == "binarycrossentropy":
                    # sigmoid outputs are not mutually exclusive, which
                    # conflicts with the multi-class task
                    self.logger.warning("activation function 'sigmoid' is "
                                        "incompatible with multi-class "
                                        "classification models")
                    return "sigmoid"
        return None
    def create_model(self) -> None:
        """Build the Keras model from the model definition."""
        KerasHelper.create_model(self)
    def print_model_summary(self):
        """Print the Keras model summary."""
        KerasHelper.print_model_summary(self)
    def set_seed(self) -> None:
        """Seed the random number generators for reproducibility."""
        KerasHelper.set_seed(self)
    def _train_model(self,
                     file_name_train: Optional[str] = None,
                     file_name_val: Optional[str] = None,
                     x_train: Optional[List[str]] = None,
                     y_train: Optional[List[str]] = None,
                     x_val: Optional[List[str]] = None,
                     y_val: Optional[List[str]] = None) -> None:
        """Train the model from in-memory examples or data files.

        Either ``x_train``/``y_train`` or ``file_name_train`` must be given
        (likewise for validation); raises otherwise.
        """
        if x_train is not None and y_train is not None:
            training_dataset = tf.data.Dataset.from_tensor_slices(
                (self.encode_x(x_train), self.encode_y(y_train)))
        elif file_name_train is not None:
            seq_len: int = self.get_sequence_length(file_name_train)
            # from_generator requires a zero-argument callable, hence the closure
            def train_generator():
                return self.dataset_generator(file_name_train)
            training_dataset = tf.data.Dataset.from_generator(
                train_generator, (tf.float64, tf.bool),
                output_shapes=(tf.TensorShape([seq_len, self.alphabet_size]),
                               tf.TensorShape([len(self.definition.labels)])))
        else:
            raise Exception("specify either file_name_train or x_train, y_train")
        if x_val is not None and y_val is not None:
            validation_dataset = tf.data.Dataset.from_tensor_slices(
                (self.encode_x(x_val), self.encode_y(y_val)))
        elif file_name_val is not None:
            seq_len: int = self.get_sequence_length(file_name_val)
            def val_generator():
                return self.dataset_generator(file_name_val)
            validation_dataset = tf.data.Dataset.from_generator(
                val_generator, (tf.float64, tf.bool),
                output_shapes=(tf.TensorShape([seq_len, self.alphabet_size]),
                               tf.TensorShape([len(self.definition.labels)])))
        else:
            raise Exception("specify either file_name_val or x_val, y_val")
        KerasHelper.train_model(self, training_dataset, validation_dataset,
                                self.silent)
    def evaluate_model(self, file_name: Optional[str] = None,
                       x: Optional[List[str]] = None,
                       y: Optional[List[str]] = None):
        """Evaluate the model on in-memory examples or a data file."""
        if x is not None and y is not None:
            dataset = tf.data.Dataset.from_tensor_slices(
                (self.encode_x(x), self.encode_y(y)))
        elif file_name is not None:
            seq_len: int = self.get_sequence_length(file_name)
            def generator():
                return self.dataset_generator(file_name)
            dataset = tf.data.Dataset.from_generator(
                generator, (tf.float64, tf.bool),
                output_shapes=(tf.TensorShape([seq_len, self.alphabet_size]),
                               tf.TensorShape([len(self.definition.labels)])))
        else:
            raise Exception("specify either file_name or x, y")
        return KerasHelper.evaluate_model(self, dataset)
    def predict(self, file_name: Optional[str] = None,
                x: Optional[Any] = None,
                encode: bool = True):
        """Predict labels for in-memory examples or a data file.

        :param encode: if ``True``, ``x`` is raw sequences and is one-hot
            encoded first; if ``False``, ``x`` is assumed already encoded
        """
        if x is not None:
            if encode:
                x = self.encode_x(x)
            dataset = tf.data.Dataset.from_tensor_slices((x))
        elif file_name is not None:
            seq_len: int = self.get_sequence_length(file_name)
            def generator():
                return self.dataset_generator(file_name)
            # inputs only here; labels are not needed for prediction
            dataset = tf.data.Dataset.from_generator(
                generator, (tf.float64),
                output_shapes=(tf.TensorShape([seq_len, self.alphabet_size])))
        else:
            raise Exception("specify either file_name or x")
        return KerasHelper.predict(self, dataset)
    def save_model(self, file_name: Optional[str] = None) -> None:
        """Save the trained model to disk."""
        KerasHelper.save_model(self, file_name)
    def write_session_info(self) -> None:
        """Write library/session version information to the output directory."""
        KerasHelper.write_session_info(self)
    def load_model(self, file_name: Optional[str] = None) -> None:
        """Load a previously saved model from disk."""
        KerasHelper.load_model(self, file_name)
    def get_num_params(self) -> ModelSize:
        """Return the number of trainable and non-trainable parameters."""
        return KerasHelper.get_num_params(self)
    def encode_x(self, x: List[str]):
        """One-hot encode sequences; add a height axis for 2D input encoding."""
        encoded_x = super().encode_x(x)
        if self.definition.input_encoding == "2D":
            # from (N, W, C) to (N, H, W, C)
            encoded_x = np.expand_dims(encoded_x, axis=1)
        return encoded_x
    def decode_x(self, x):
        """Inverse of :meth:`encode_x`: drop the height axis, then decode."""
        if self.definition.input_encoding == "2D":
            # from (N, H, W, C) to (N, W, C)
            x = np.squeeze(x, axis=1)
        return super().decode_x(x)
class KerasDNAMultiLabelClassificationLearner(
        DNAMultiLabelClassificationLearner):
    """Keras/TensorFlow learner for multi-label classification of DNA sequences.

    Model construction, training, evaluation, prediction, and persistence are
    delegated to ``KerasHelper``; this class selects the compute device and
    builds ``tf.data`` input pipelines from in-memory examples or data files.
    """
    def __init__(self, model_definition: ModelDefinition, data_dir: str,
                 output_dir: str, validate_data: bool = True,
                 gpu_id: int = 0, silent: bool = False) -> None:
        """Initialize the learner and select GPU or CPU execution.

        :param model_definition: seqgra model definition
        :param data_dir: directory containing the data set files
        :param output_dir: directory for learner output
        :param validate_data: whether data files are validated on load
        :param gpu_id: index of the GPU to use; ``-1`` forces CPU
        :param silent: suppress progress output
        """
        super().__init__(model_definition, data_dir, output_dir,
                         validate_data, gpu_id, silent=silent)
        KerasHelper.init_tf_memory_policy()
        gpus = tf.config.list_physical_devices("GPU")
        # use CUDA only if TF was built with GPU support, at least one GPU is
        # visible, and the caller did not request CPU-only mode (gpu_id == -1)
        self.use_cuda: bool = tf.test.is_built_with_gpu_support() and \
            len(gpus) > 0 and gpu_id != -1
        if self.use_cuda:
            tf.config.set_visible_devices(gpus[gpu_id], "GPU")
            self.device_label: str = "/GPU:" + str(gpu_id)
        else:
            self.device_label: str = "/CPU:0"
        self._check_task_loss_compatibility()
    def _check_task_loss_compatibility(self) -> None:
        """Warn if the configured loss is not a known multi-label loss."""
        if "loss" in self.definition.loss_hyperparameters:
            loss: str = self.definition.loss_hyperparameters["loss"]
            # normalize, e.g. "Binary_Crossentropy" -> "binarycrossentropy"
            loss = loss.lower().replace("_", "").strip()
            if loss not in KerasHelper.MULTI_LABEL_CLASSIFICATION_LOSSES:
                self.logger.warning("loss function '%s' is incompatible with "
                                    "multi-label classification models", loss)
    def _get_output_layer_activation_function(self) -> Optional[str]:
        """Derive the output layer activation from the loss hyperparameters.

        Returns an activation name only when both ``from_logits`` and ``loss``
        are configured and ``from_logits`` parses as true; otherwise ``None``.
        (How the returned name and the from_logits flag interact is handled by
        KerasHelper — NOTE(review): confirm there.)
        """
        if "from_logits" in self.definition.loss_hyperparameters and \
                "loss" in self.definition.loss_hyperparameters:
            # hyperparameter values are strings; strtobool handles "true"/"1"/...
            from_logits: bool = bool(strtobool(
                self.definition.loss_hyperparameters["from_logits"]))
            if from_logits:
                loss: str = self.definition.loss_hyperparameters["loss"]
                loss = loss.lower().replace("_", "").strip()
                if loss == "categoricalcrossentropy" or \
                        loss == "sparsecategoricalcrossentropy":
                    # softmax forces mutually exclusive outputs, which
                    # conflicts with the multi-label task
                    # (fixed typo: message previously read 'sofmax'; now
                    # consistent with KerasProteinMultiLabelClassificationLearner)
                    self.logger.warning("activation function 'softmax' is "
                                        "incompatible with multi-label "
                                        "classification models")
                    return "softmax"
                elif loss == "binarycrossentropy":
                    return "sigmoid"
        return None
    def create_model(self) -> None:
        """Build the Keras model from the model definition."""
        KerasHelper.create_model(self)
    def print_model_summary(self):
        """Print the Keras model summary."""
        KerasHelper.print_model_summary(self)
    def set_seed(self) -> None:
        """Seed the random number generators for reproducibility."""
        KerasHelper.set_seed(self)
    def _train_model(self,
                     file_name_train: Optional[str] = None,
                     file_name_val: Optional[str] = None,
                     x_train: Optional[List[str]] = None,
                     y_train: Optional[List[str]] = None,
                     x_val: Optional[List[str]] = None,
                     y_val: Optional[List[str]] = None) -> None:
        """Train the model from in-memory examples or data files.

        Either ``x_train``/``y_train`` or ``file_name_train`` must be given
        (likewise for validation); raises otherwise.
        """
        if x_train is not None and y_train is not None:
            training_dataset = tf.data.Dataset.from_tensor_slices(
                (self.encode_x(x_train), self.encode_y(y_train)))
        elif file_name_train is not None:
            seq_len: int = self.get_sequence_length(file_name_train)
            # from_generator requires a zero-argument callable, hence the closure
            def train_generator():
                return self.dataset_generator(file_name_train)
            training_dataset = tf.data.Dataset.from_generator(
                train_generator, (tf.float64, tf.bool),
                output_shapes=(tf.TensorShape([seq_len, self.alphabet_size]),
                               tf.TensorShape([len(self.definition.labels)])))
        else:
            raise Exception("specify either file_name_train or x_train, y_train")
        if x_val is not None and y_val is not None:
            validation_dataset = tf.data.Dataset.from_tensor_slices(
                (self.encode_x(x_val), self.encode_y(y_val)))
        elif file_name_val is not None:
            seq_len: int = self.get_sequence_length(file_name_val)
            def val_generator():
                return self.dataset_generator(file_name_val)
            validation_dataset = tf.data.Dataset.from_generator(
                val_generator, (tf.float64, tf.bool),
                output_shapes=(tf.TensorShape([seq_len, self.alphabet_size]),
                               tf.TensorShape([len(self.definition.labels)])))
        else:
            raise Exception("specify either file_name_val or x_val, y_val")
        KerasHelper.train_model(self, training_dataset, validation_dataset,
                                self.silent)
    def evaluate_model(self, file_name: Optional[str] = None,
                       x: Optional[List[str]] = None,
                       y: Optional[List[str]] = None):
        """Evaluate the model on in-memory examples or a data file."""
        if x is not None and y is not None:
            dataset = tf.data.Dataset.from_tensor_slices(
                (self.encode_x(x), self.encode_y(y)))
        elif file_name is not None:
            seq_len: int = self.get_sequence_length(file_name)
            def generator():
                return self.dataset_generator(file_name)
            dataset = tf.data.Dataset.from_generator(
                generator, (tf.float64, tf.bool),
                output_shapes=(tf.TensorShape([seq_len, self.alphabet_size]),
                               tf.TensorShape([len(self.definition.labels)])))
        else:
            raise Exception("specify either file_name or x, y")
        return KerasHelper.evaluate_model(self, dataset)
    def predict(self, file_name: Optional[str] = None,
                x: Optional[Any] = None,
                encode: bool = True):
        """Predict labels for in-memory examples or a data file.

        :param encode: if ``True``, ``x`` is raw sequences and is one-hot
            encoded first; if ``False``, ``x`` is assumed already encoded
        """
        if x is not None:
            if encode:
                x = self.encode_x(x)
            dataset = tf.data.Dataset.from_tensor_slices((x))
        elif file_name is not None:
            seq_len: int = self.get_sequence_length(file_name)
            def generator():
                return self.dataset_generator(file_name)
            # inputs only here; labels are not needed for prediction
            dataset = tf.data.Dataset.from_generator(
                generator, (tf.float64),
                output_shapes=(tf.TensorShape([seq_len, self.alphabet_size])))
        else:
            raise Exception("specify either file_name or x")
        return KerasHelper.predict(self, dataset)
    def save_model(self, file_name: Optional[str] = None) -> None:
        """Save the trained model to disk."""
        KerasHelper.save_model(self, file_name)
    def write_session_info(self) -> None:
        """Write library/session version information to the output directory."""
        KerasHelper.write_session_info(self)
    def load_model(self, file_name: Optional[str] = None) -> None:
        """Load a previously saved model from disk."""
        KerasHelper.load_model(self, file_name)
    def get_num_params(self) -> ModelSize:
        """Return the number of trainable and non-trainable parameters."""
        return KerasHelper.get_num_params(self)
    def encode_x(self, x: List[str]):
        """One-hot encode sequences; add a height axis for 2D input encoding."""
        encoded_x = super().encode_x(x)
        if self.definition.input_encoding == "2D":
            # from (N, W, C) to (N, H, W, C)
            encoded_x = np.expand_dims(encoded_x, axis=1)
        return encoded_x
    def decode_x(self, x):
        """Inverse of :meth:`encode_x`: drop the height axis, then decode."""
        if self.definition.input_encoding == "2D":
            # from (N, H, W, C) to (N, W, C)
            x = np.squeeze(x, axis=1)
        return super().decode_x(x)
class KerasProteinMultiClassClassificationLearner(
        ProteinMultiClassClassificationLearner):
    """Keras/TensorFlow learner for multi-class classification of protein sequences.

    Model construction, training, evaluation, prediction, and persistence are
    delegated to ``KerasHelper``; this class selects the compute device and
    builds ``tf.data`` input pipelines from in-memory examples or data files.
    """
    def __init__(self, model_definition: ModelDefinition, data_dir: str,
                 output_dir: str, validate_data: bool = True,
                 gpu_id: int = 0, silent: bool = False) -> None:
        """Initialize the learner and select GPU or CPU execution.

        :param model_definition: seqgra model definition
        :param data_dir: directory containing the data set files
        :param output_dir: directory for learner output
        :param validate_data: whether data files are validated on load
        :param gpu_id: index of the GPU to use; ``-1`` forces CPU
        :param silent: suppress progress output
        """
        super().__init__(model_definition, data_dir, output_dir,
                         validate_data, gpu_id, silent=silent)
        KerasHelper.init_tf_memory_policy()
        gpus = tf.config.list_physical_devices("GPU")
        # use CUDA only if TF was built with GPU support, at least one GPU is
        # visible, and the caller did not request CPU-only mode (gpu_id == -1)
        self.use_cuda: bool = tf.test.is_built_with_gpu_support() and \
            len(gpus) > 0 and gpu_id != -1
        if self.use_cuda:
            tf.config.set_visible_devices(gpus[gpu_id], "GPU")
            self.device_label: str = "/GPU:" + str(gpu_id)
        else:
            self.device_label: str = "/CPU:0"
        self._check_task_loss_compatibility()
    def _check_task_loss_compatibility(self) -> None:
        """Warn if the configured loss is not a known multi-class loss."""
        if "loss" in self.definition.loss_hyperparameters:
            loss: str = self.definition.loss_hyperparameters["loss"]
            # normalize, e.g. "Categorical_Crossentropy" -> "categoricalcrossentropy"
            loss = loss.lower().replace("_", "").strip()
            if not loss in KerasHelper.MULTI_CLASS_CLASSIFICATION_LOSSES:
                self.logger.warning("loss function '%s' is incompatible with "
                                    "multi-class classification models", loss)
    def _get_output_layer_activation_function(self) -> Optional[str]:
        """Derive the output layer activation from the loss hyperparameters.

        Returns an activation name only when both ``from_logits`` and ``loss``
        are configured and ``from_logits`` parses as true; otherwise ``None``.
        (How the returned name and the from_logits flag interact is handled by
        KerasHelper — NOTE(review): confirm there.)
        """
        if "from_logits" in self.definition.loss_hyperparameters and \
                "loss" in self.definition.loss_hyperparameters:
            # hyperparameter values are strings; strtobool handles "true"/"1"/...
            from_logits: bool = bool(strtobool(
                self.definition.loss_hyperparameters["from_logits"]))
            if from_logits:
                loss: str = self.definition.loss_hyperparameters["loss"]
                loss = loss.lower().replace("_", "").strip()
                if loss == "categoricalcrossentropy" or \
                        loss == "sparsecategoricalcrossentropy":
                    return "softmax"
                elif loss == "binarycrossentropy":
                    # sigmoid outputs are not mutually exclusive, which
                    # conflicts with the multi-class task
                    self.logger.warning("activation function 'sigmoid' is "
                                        "incompatible with multi-class "
                                        "classification models")
                    return "sigmoid"
        return None
    def create_model(self) -> None:
        """Build the Keras model from the model definition."""
        KerasHelper.create_model(self)
    def print_model_summary(self):
        """Print the Keras model summary."""
        KerasHelper.print_model_summary(self)
    def set_seed(self) -> None:
        """Seed the random number generators for reproducibility."""
        KerasHelper.set_seed(self)
    def _train_model(self,
                     file_name_train: Optional[str] = None,
                     file_name_val: Optional[str] = None,
                     x_train: Optional[List[str]] = None,
                     y_train: Optional[List[str]] = None,
                     x_val: Optional[List[str]] = None,
                     y_val: Optional[List[str]] = None) -> None:
        """Train the model from in-memory examples or data files.

        Either ``x_train``/``y_train`` or ``file_name_train`` must be given
        (likewise for validation); raises otherwise.
        """
        if x_train is not None and y_train is not None:
            training_dataset = tf.data.Dataset.from_tensor_slices(
                (self.encode_x(x_train), self.encode_y(y_train)))
        elif file_name_train is not None:
            seq_len: int = self.get_sequence_length(file_name_train)
            # from_generator requires a zero-argument callable, hence the closure
            def train_generator():
                return self.dataset_generator(file_name_train)
            training_dataset = tf.data.Dataset.from_generator(
                train_generator, (tf.float64, tf.bool),
                output_shapes=(tf.TensorShape([seq_len, self.alphabet_size]),
                               tf.TensorShape([len(self.definition.labels)])))
        else:
            raise Exception("specify either file_name_train or x_train, y_train")
        if x_val is not None and y_val is not None:
            validation_dataset = tf.data.Dataset.from_tensor_slices(
                (self.encode_x(x_val), self.encode_y(y_val)))
        elif file_name_val is not None:
            seq_len: int = self.get_sequence_length(file_name_val)
            def val_generator():
                return self.dataset_generator(file_name_val)
            validation_dataset = tf.data.Dataset.from_generator(
                val_generator, (tf.float64, tf.bool),
                output_shapes=(tf.TensorShape([seq_len, self.alphabet_size]),
                               tf.TensorShape([len(self.definition.labels)])))
        else:
            raise Exception("specify either file_name_val or x_val, y_val")
        KerasHelper.train_model(self, training_dataset, validation_dataset,
                                self.silent)
    def evaluate_model(self, file_name: Optional[str] = None,
                       x: Optional[List[str]] = None,
                       y: Optional[List[str]] = None):
        """Evaluate the model on in-memory examples or a data file."""
        if x is not None and y is not None:
            dataset = tf.data.Dataset.from_tensor_slices(
                (self.encode_x(x), self.encode_y(y)))
        elif file_name is not None:
            seq_len: int = self.get_sequence_length(file_name)
            def generator():
                return self.dataset_generator(file_name)
            dataset = tf.data.Dataset.from_generator(
                generator, (tf.float64, tf.bool),
                output_shapes=(tf.TensorShape([seq_len, self.alphabet_size]),
                               tf.TensorShape([len(self.definition.labels)])))
        else:
            raise Exception("specify either file_name or x, y")
        return KerasHelper.evaluate_model(self, dataset)
    def predict(self, file_name: Optional[str] = None,
                x: Optional[Any] = None,
                encode: bool = True):
        """Predict labels for in-memory examples or a data file.

        :param encode: if ``True``, ``x`` is raw sequences and is one-hot
            encoded first; if ``False``, ``x`` is assumed already encoded
        """
        if x is not None:
            if encode:
                x = self.encode_x(x)
            dataset = tf.data.Dataset.from_tensor_slices((x))
        elif file_name is not None:
            seq_len: int = self.get_sequence_length(file_name)
            def generator():
                return self.dataset_generator(file_name)
            # inputs only here; labels are not needed for prediction
            dataset = tf.data.Dataset.from_generator(
                generator, (tf.float64),
                output_shapes=(tf.TensorShape([seq_len, self.alphabet_size])))
        else:
            raise Exception("specify either file_name or x")
        return KerasHelper.predict(self, dataset)
    def save_model(self, file_name: Optional[str] = None) -> None:
        """Save the trained model to disk."""
        KerasHelper.save_model(self, file_name)
    def write_session_info(self) -> None:
        """Write library/session version information to the output directory."""
        KerasHelper.write_session_info(self)
    def load_model(self, file_name: Optional[str] = None) -> None:
        """Load a previously saved model from disk."""
        KerasHelper.load_model(self, file_name)
    def get_num_params(self) -> ModelSize:
        """Return the number of trainable and non-trainable parameters."""
        return KerasHelper.get_num_params(self)
    def encode_x(self, x: List[str]):
        """One-hot encode sequences; add a height axis for 2D input encoding."""
        encoded_x = super().encode_x(x)
        if self.definition.input_encoding == "2D":
            # from (N, W, C) to (N, H, W, C)
            encoded_x = np.expand_dims(encoded_x, axis=1)
        return encoded_x
    def decode_x(self, x):
        """Inverse of :meth:`encode_x`: drop the height axis, then decode."""
        if self.definition.input_encoding == "2D":
            # from (N, H, W, C) to (N, W, C)
            x = np.squeeze(x, axis=1)
        return super().decode_x(x)
class KerasProteinMultiLabelClassificationLearner(
        ProteinMultiLabelClassificationLearner):
    """Keras/TensorFlow learner for multi-label classification of protein sequences.

    Model construction, training, evaluation, prediction, and persistence are
    delegated to ``KerasHelper``; this class selects the compute device and
    builds ``tf.data`` input pipelines from in-memory examples or data files.
    """
    def __init__(self, model_definition: ModelDefinition, data_dir: str,
                 output_dir: str, validate_data: bool = True,
                 gpu_id: int = 0, silent: bool = False) -> None:
        """Initialize the learner and select GPU or CPU execution.

        :param model_definition: seqgra model definition
        :param data_dir: directory containing the data set files
        :param output_dir: directory for learner output
        :param validate_data: whether data files are validated on load
        :param gpu_id: index of the GPU to use; ``-1`` forces CPU
        :param silent: suppress progress output
        """
        super().__init__(model_definition, data_dir, output_dir,
                         validate_data, gpu_id, silent=silent)
        KerasHelper.init_tf_memory_policy()
        gpus = tf.config.list_physical_devices("GPU")
        # use CUDA only if TF was built with GPU support, at least one GPU is
        # visible, and the caller did not request CPU-only mode (gpu_id == -1)
        self.use_cuda: bool = tf.test.is_built_with_gpu_support() and \
            len(gpus) > 0 and gpu_id != -1
        if self.use_cuda:
            tf.config.set_visible_devices(gpus[gpu_id], "GPU")
            self.device_label: str = "/GPU:" + str(gpu_id)
        else:
            self.device_label: str = "/CPU:0"
        self._check_task_loss_compatibility()
    def _check_task_loss_compatibility(self) -> None:
        """Warn if the configured loss is not a known multi-label loss."""
        if "loss" in self.definition.loss_hyperparameters:
            loss: str = self.definition.loss_hyperparameters["loss"]
            # normalize, e.g. "Binary_Crossentropy" -> "binarycrossentropy"
            loss = loss.lower().replace("_", "").strip()
            if not loss in KerasHelper.MULTI_LABEL_CLASSIFICATION_LOSSES:
                self.logger.warning("loss function '%s' is incompatible with "
                                    "multi-label classification models", loss)
    def _get_output_layer_activation_function(self) -> Optional[str]:
        """Derive the output layer activation from the loss hyperparameters.

        Returns an activation name only when both ``from_logits`` and ``loss``
        are configured and ``from_logits`` parses as true; otherwise ``None``.
        (How the returned name and the from_logits flag interact is handled by
        KerasHelper — NOTE(review): confirm there.)
        """
        if "from_logits" in self.definition.loss_hyperparameters and \
                "loss" in self.definition.loss_hyperparameters:
            # hyperparameter values are strings; strtobool handles "true"/"1"/...
            from_logits: bool = bool(strtobool(
                self.definition.loss_hyperparameters["from_logits"]))
            if from_logits:
                loss: str = self.definition.loss_hyperparameters["loss"]
                loss = loss.lower().replace("_", "").strip()
                if loss == "categoricalcrossentropy" or \
                        loss == "sparsecategoricalcrossentropy":
                    # softmax forces mutually exclusive outputs, which
                    # conflicts with the multi-label task
                    self.logger.warning("activation function 'softmax' is "
                                        "incompatible with multi-label "
                                        "classification models")
                    return "softmax"
                elif loss == "binarycrossentropy":
                    return "sigmoid"
        return None
    def create_model(self) -> None:
        """Build the Keras model from the model definition."""
        KerasHelper.create_model(self)
    def print_model_summary(self):
        """Print the Keras model summary."""
        KerasHelper.print_model_summary(self)
    def set_seed(self) -> None:
        """Seed the random number generators for reproducibility."""
        KerasHelper.set_seed(self)
    def _train_model(self,
                     file_name_train: Optional[str] = None,
                     file_name_val: Optional[str] = None,
                     x_train: Optional[List[str]] = None,
                     y_train: Optional[List[str]] = None,
                     x_val: Optional[List[str]] = None,
                     y_val: Optional[List[str]] = None) -> None:
        """Train the model from in-memory examples or data files.

        Either ``x_train``/``y_train`` or ``file_name_train`` must be given
        (likewise for validation); raises otherwise.
        """
        if x_train is not None and y_train is not None:
            training_dataset = tf.data.Dataset.from_tensor_slices(
                (self.encode_x(x_train), self.encode_y(y_train)))
        elif file_name_train is not None:
            seq_len: int = self.get_sequence_length(file_name_train)
            # from_generator requires a zero-argument callable, hence the closure
            def train_generator():
                return self.dataset_generator(file_name_train)
            training_dataset = tf.data.Dataset.from_generator(
                train_generator, (tf.float64, tf.bool),
                output_shapes=(tf.TensorShape([seq_len, self.alphabet_size]),
                               tf.TensorShape([len(self.definition.labels)])))
        else:
            raise Exception("specify either file_name_train or x_train, y_train")
        if x_val is not None and y_val is not None:
            validation_dataset = tf.data.Dataset.from_tensor_slices(
                (self.encode_x(x_val), self.encode_y(y_val)))
        elif file_name_val is not None:
            seq_len: int = self.get_sequence_length(file_name_val)
            def val_generator():
                return self.dataset_generator(file_name_val)
            validation_dataset = tf.data.Dataset.from_generator(
                val_generator, (tf.float64, tf.bool),
                output_shapes=(tf.TensorShape([seq_len, self.alphabet_size]),
                               tf.TensorShape([len(self.definition.labels)])))
        else:
            raise Exception("specify either file_name_val or x_val, y_val")
        KerasHelper.train_model(self, training_dataset, validation_dataset,
                                self.silent)
    def evaluate_model(self, file_name: Optional[str] = None,
                       x: Optional[List[str]] = None,
                       y: Optional[List[str]] = None):
        """Evaluate the model on in-memory examples or a data file."""
        if x is not None and y is not None:
            dataset = tf.data.Dataset.from_tensor_slices(
                (self.encode_x(x), self.encode_y(y)))
        elif file_name is not None:
            seq_len: int = self.get_sequence_length(file_name)
            def generator():
                return self.dataset_generator(file_name)
            dataset = tf.data.Dataset.from_generator(
                generator, (tf.float64, tf.bool),
                output_shapes=(tf.TensorShape([seq_len, self.alphabet_size]),
                               tf.TensorShape([len(self.definition.labels)])))
        else:
            raise Exception("specify either file_name or x, y")
        return KerasHelper.evaluate_model(self, dataset)
    def predict(self, file_name: Optional[str] = None,
                x: Optional[Any] = None,
                encode: bool = True):
        """Predict labels for in-memory examples or a data file.

        :param encode: if ``True``, ``x`` is raw sequences and is one-hot
            encoded first; if ``False``, ``x`` is assumed already encoded
        """
        if x is not None:
            if encode:
                x = self.encode_x(x)
            dataset = tf.data.Dataset.from_tensor_slices((x))
        elif file_name is not None:
            seq_len: int = self.get_sequence_length(file_name)
            def generator():
                return self.dataset_generator(file_name)
            # inputs only here; labels are not needed for prediction
            dataset = tf.data.Dataset.from_generator(
                generator, (tf.float64),
                output_shapes=(tf.TensorShape([seq_len, self.alphabet_size])))
        else:
            raise Exception("specify either file_name or x")
        return KerasHelper.predict(self, dataset)
    def save_model(self, file_name: Optional[str] = None) -> None:
        """Save the trained model to disk."""
        KerasHelper.save_model(self, file_name)
    def write_session_info(self) -> None:
        """Write library/session version information to the output directory."""
        KerasHelper.write_session_info(self)
    def load_model(self, file_name: Optional[str] = None) -> None:
        """Load a previously saved model from disk."""
        KerasHelper.load_model(self, file_name)
    def get_num_params(self) -> ModelSize:
        """Return the number of trainable and non-trainable parameters."""
        return KerasHelper.get_num_params(self)
    def encode_x(self, x: List[str]):
        """One-hot encode sequences; add a height axis for 2D input encoding."""
        encoded_x = super().encode_x(x)
        if self.definition.input_encoding == "2D":
            # from (N, W, C) to (N, H, W, C)
            encoded_x = np.expand_dims(encoded_x, axis=1)
        return encoded_x
    def decode_x(self, x):
        """Inverse of :meth:`encode_x`: drop the height axis, then decode."""
        if self.definition.input_encoding == "2D":
            # from (N, H, W, C) to (N, W, C)
            x = np.squeeze(x, axis=1)
        return super().decode_x(x)
| 43.121302
| 81
| 0.591973
| 3,393
| 29,150
| 4.841733
| 0.04863
| 0.046749
| 0.024105
| 0.038958
| 0.949172
| 0.949172
| 0.949172
| 0.949172
| 0.949172
| 0.949172
| 0
| 0.003184
| 0.310429
| 29,150
| 675
| 82
| 43.185185
| 0.814089
| 0.01163
| 0
| 0.961326
| 0
| 0
| 0.063444
| 0.007223
| 0
| 0
| 0
| 0
| 0
| 1
| 0.139963
| false
| 0
| 0.020258
| 0.036832
| 0.255985
| 0.014733
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c7f9549e95823f0cbe72ff61ca903d6a0d6c6820
| 162
|
py
|
Python
|
src/utils/general_validation.py
|
BoaVaga/boavaga_server
|
7d25a68832d3b9f4f5666d0a3d55c99025498511
|
[
"MIT"
] | null | null | null |
src/utils/general_validation.py
|
BoaVaga/boavaga_server
|
7d25a68832d3b9f4f5666d0a3d55c99025498511
|
[
"MIT"
] | null | null | null |
src/utils/general_validation.py
|
BoaVaga/boavaga_server
|
7d25a68832d3b9f4f5666d0a3d55c99025498511
|
[
"MIT"
] | null | null | null |
import re
# Telephone number pattern: optional leading '+', then 1 to 19 digits.
# Kept as re.match (not fullmatch): the trailing '$' in the pattern also
# accepts a single trailing newline, and that behavior is preserved.
_REGEX_VALIDATE_TEL = re.compile(r'^\+?[0-9]{1,19}$')
def validate_telefone(tel: str) -> bool:
    """Return True when *tel* looks like a valid telephone number."""
    return bool(_REGEX_VALIDATE_TEL.match(tel))
| 20.25
| 53
| 0.703704
| 27
| 162
| 3.962963
| 0.740741
| 0.242991
| 0.299065
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035714
| 0.135802
| 162
| 7
| 54
| 23.142857
| 0.728571
| 0
| 0
| 0
| 0
| 0
| 0.098765
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
2a009883358ef4f7b8a269b3e2fa367f463e7834
| 77
|
py
|
Python
|
themata/__init__.py
|
Thecarisma/themata
|
09a8ce670479ea4e9b5a26457f5cb290728f604a
|
[
"CC0-1.0"
] | 2
|
2020-04-27T10:14:54.000Z
|
2020-04-28T01:24:59.000Z
|
themata/__init__.py
|
Thecarisma/themata
|
09a8ce670479ea4e9b5a26457f5cb290728f604a
|
[
"CC0-1.0"
] | 28
|
2020-05-16T19:50:54.000Z
|
2021-12-02T07:38:03.000Z
|
themata/__init__.py
|
Thecarisma/themata
|
09a8ce670479ea4e9b5a26457f5cb290728f604a
|
[
"CC0-1.0"
] | null | null | null |
import os
def get_html_theme_path():
    """Return the directory that contains this module."""
    theme_dir = os.path.dirname(__file__)
    return theme_dir
| 19.25
| 36
| 0.74026
| 12
| 77
| 4.166667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168831
| 77
| 4
| 36
| 19.25
| 0.78125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
aa7bd4f0e5ded6bb26301c1f01d5a48f4f082f1f
| 137
|
py
|
Python
|
database/seed/users.py
|
eddycheong/skeleton-flask-sqlalchemy
|
117e4cac0bf4d912f9546e2aeac77bccc2b7e3c0
|
[
"MIT"
] | null | null | null |
database/seed/users.py
|
eddycheong/skeleton-flask-sqlalchemy
|
117e4cac0bf4d912f9546e2aeac77bccc2b7e3c0
|
[
"MIT"
] | null | null | null |
database/seed/users.py
|
eddycheong/skeleton-flask-sqlalchemy
|
117e4cac0bf4d912f9546e2aeac77bccc2b7e3c0
|
[
"MIT"
] | null | null | null |
from database.model import User
def seed_users():
    """Return the list of User rows used to seed the database."""
    names = ("seed_user_1", "seed_user_2")
    return [User(name=n) for n in names]
| 19.571429
| 33
| 0.613139
| 19
| 137
| 4.157895
| 0.631579
| 0.202532
| 0.303797
| 0.405063
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019608
| 0.255474
| 137
| 7
| 34
| 19.571429
| 0.754902
| 0
| 0
| 0
| 0
| 0
| 0.15942
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0
| 0.166667
| 0.166667
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
|
0
| 7
|
aab9141ed7548918316b11d21e1c62648a531143
| 9,349
|
py
|
Python
|
tasks/LM/configs.py
|
omri123/rotational-unit-of-memory
|
e796c841e1e837df09497ba77c3bc285db47d02d
|
[
"MIT"
] | 82
|
2019-04-18T19:32:03.000Z
|
2022-03-19T00:50:56.000Z
|
tasks/LM/configs.py
|
omri123/rotational-unit-of-memory
|
e796c841e1e837df09497ba77c3bc285db47d02d
|
[
"MIT"
] | 4
|
2019-04-22T11:58:43.000Z
|
2020-05-31T01:43:03.000Z
|
tasks/LM/configs.py
|
omri123/rotational-unit-of-memory
|
e796c841e1e837df09497ba77c3bc285db47d02d
|
[
"MIT"
] | 26
|
2019-04-22T11:21:42.000Z
|
2021-11-29T06:01:10.000Z
|
def get_config(model):
    """Instantiate and return the configuration object for *model*.

    Every supported model name maps to a module-level class named
    ``<model>_config`` (e.g. ``'ptb'`` -> ``ptb_config``), so the lookup is
    table-driven instead of a 14-branch if/elif chain.

    Raises
    ------
    ValueError
        If *model* is not a supported model name.
    """
    known_models = (
        'ptb_fs_rum_test', 'ptb_fs_rum', 'ptb_fs_goru', 'ptb_fs_eunn',
        'ptb_lstm_single', 'ptb_rum_single', 'ptb_rum_single_U',
        'ptb_rum_single_tanh', 'ptb_rum_single_sigmoid',
        'ptb_rum_single_softsign', 'ptb_rum_single_1500',
        'ptb', 'enwik_rum', 'enwik',
    )
    if model not in known_models:
        # Bug fix: the original raised ValueError("Invalid model: %s", model)
        # — logging-style arguments, so the message was never interpolated.
        raise ValueError("Invalid model: %s" % model)
    return globals()[model + '_config']()
class enwik_config(object):
    """Enwik8 LM config: 2-layer fast-slow LSTM ("fs-lstm"), no activation override."""
    cell = "fs-lstm"
    init_scale = 0.01
    learning_rate = 0.001
    max_grad_norm = 1.0
    num_layers = 2
    num_steps = 100
    cell_size = 1200
    hyper_size = 1500
    embed_size = 256
    max_epoch = 35
    max_max_epoch = max_epoch
    keep_prob = 0.75
    zoneout_h = 0.95
    zoneout_c = 0.7
    lr_decay = 0.1
    batch_size = 128
    vocab_size = 205
    fast_layers = 4
    dataset = 'enwik8'
    activation = None
class enwik_rum_config(object):
    """Enwik8 LM config: 2-layer fast-slow RUM ("fs-rum") with tanh activation."""
    cell = "fs-rum"
    init_scale = 0.01
    learning_rate = 0.001
    max_grad_norm = 1.0
    num_layers = 2
    num_steps = 100
    cell_size = 1200
    hyper_size = 2000
    embed_size = 256
    max_epoch = 60
    max_max_epoch = max_epoch
    keep_prob = 0.75
    zoneout_h = 0.95
    zoneout_c = 0.7
    lr_decay = 0.1
    batch_size = 128
    vocab_size = 205
    fast_layers = 4
    T_norm = 1.0
    use_zoneout = True
    use_layer_norm = True
    activation = "tanh"
    dataset = 'enwik8'
class ptb_lstm_single_config(object):
    """PTB config: single-layer plain LSTM baseline."""
    cell = "lstm"
    num_steps = 150
    learning_rate = 0.002
    T_norm = 1.0
    num_layers = 1
    init_scale = 0.01
    max_grad_norm = 1.0
    cell_size = 1000
    embed_size = 128
    max_epoch = 100
    max_max_epoch = max_epoch
    keep_prob = 0.65
    zoneout_h = 0.9
    zoneout_c = 0.5
    lr_decay = 0.1
    batch_size = 128
    vocab_size = 50
    use_layer_norm = True
    use_zoneout = True
    dataset = 'ptb'
class ptb_rum_single_config(object):
    """PTB config: single-layer RUM, ReLU activation, update gate enabled."""
    cell = "rum"
    num_steps = 150
    learning_rate = 0.002
    T_norm = 1.0
    num_layers = 1
    init_scale = 0.01
    max_grad_norm = 1.0
    cell_size = 1000
    embed_size = 128
    max_epoch = 100
    max_max_epoch = max_epoch
    keep_prob = 0.65
    zoneout_h = 0.9
    lr_decay = 0.1
    batch_size = 128
    vocab_size = 50
    use_layer_norm = True
    use_zoneout = True
    activation = "relu"
    update_gate = True
    dataset = 'ptb'
class ptb_rum_single_U_config(object):
    """PTB config: single-layer RUM widened to 1400 units, update gate disabled."""
    cell = "rum"
    num_steps = 150
    learning_rate = 0.002
    T_norm = 1.0
    num_layers = 1
    init_scale = 0.01
    max_grad_norm = 1.0
    cell_size = 1400
    embed_size = 128
    max_epoch = 100
    max_max_epoch = max_epoch
    keep_prob = 0.65
    zoneout_h = 0.9
    lr_decay = 0.1
    batch_size = 128
    vocab_size = 50
    use_layer_norm = True
    use_zoneout = True
    activation = "relu"
    update_gate = False
    dataset = 'ptb'
class ptb_rum_single_tanh_config(object):
    """PTB config: single-layer RUM, tanh activation, no time normalization (T_norm=None)."""
    cell = "rum"
    num_steps = 150
    learning_rate = 0.002
    T_norm = None
    num_layers = 1
    init_scale = 0.01
    max_grad_norm = 1.0
    cell_size = 1000
    embed_size = 128
    max_epoch = 100
    max_max_epoch = max_epoch
    keep_prob = 0.65
    zoneout_h = 0.9
    lr_decay = 0.1
    batch_size = 128
    vocab_size = 50
    use_layer_norm = True
    use_zoneout = True
    activation = "tanh"
    update_gate = True
    dataset = 'ptb'
class ptb_rum_single_sigmoid_config(object):
    """PTB config: single-layer RUM, sigmoid activation, no time normalization (T_norm=None)."""
    cell = "rum"
    num_steps = 150
    learning_rate = 0.002
    T_norm = None
    num_layers = 1
    init_scale = 0.01
    max_grad_norm = 1.0
    cell_size = 1000
    embed_size = 128
    max_epoch = 100
    max_max_epoch = max_epoch
    keep_prob = 0.65
    zoneout_h = 0.9
    lr_decay = 0.1
    batch_size = 128
    vocab_size = 50
    use_layer_norm = True
    use_zoneout = True
    activation = "sigmoid"
    update_gate = True
    dataset = 'ptb'
class ptb_rum_single_softsign_config(object):
    """PTB config: single-layer RUM, softsign activation, no time normalization (T_norm=None)."""
    cell = "rum"
    num_steps = 150
    learning_rate = 0.002
    T_norm = None
    num_layers = 1
    init_scale = 0.01
    max_grad_norm = 1.0
    cell_size = 1000
    embed_size = 128
    max_epoch = 100
    max_max_epoch = max_epoch
    keep_prob = 0.65
    zoneout_h = 0.9
    lr_decay = 0.1
    batch_size = 128
    vocab_size = 50
    use_layer_norm = True
    use_zoneout = True
    activation = "softsign"
    update_gate = True
    dataset = 'ptb'
class ptb_rum_single_1500_config(object):
    """PTB config: single-layer RUM widened to 1500 units, ReLU activation."""
    cell = "rum"
    num_steps = 150
    learning_rate = 0.002
    T_norm = 1.0
    num_layers = 1
    init_scale = 0.01
    max_grad_norm = 1.0
    cell_size = 1500
    embed_size = 128
    max_epoch = 100
    max_max_epoch = max_epoch
    keep_prob = 0.65
    zoneout_h = 0.9
    lr_decay = 0.1
    batch_size = 128
    vocab_size = 50
    use_layer_norm = True
    use_zoneout = True
    activation = "relu"
    update_gate = True
    dataset = 'ptb'
class ptb_config(object):
    """PTB config: 2-layer fast-slow baseline.

    NOTE(review): unlike the sibling configs this class sets no ``cell``
    attribute — presumably the consumer falls back to a default; confirm.
    """
    init_scale = 0.01
    learning_rate = 0.002
    max_grad_norm = 1.0
    num_layers = 2
    num_steps = 150
    cell_size = 700
    hyper_size = 400
    embed_size = 128
    max_epoch = 200
    max_max_epoch = max_epoch
    keep_prob = 0.65
    zoneout_h = 0.9
    zoneout_c = 0.5
    lr_decay = 0.1
    batch_size = 128
    vocab_size = 50
    fast_layers = 2
    dataset = 'ptb'
class ptb_fs_rum_test_config(object):
    """PTB config: small fs-RUM (200-unit cells) intended for quick test runs."""
    cell = "fs-rum"
    init_scale = 0.01
    learning_rate = 0.002
    max_grad_norm = 1.0
    num_layers = 2
    num_steps = 150
    cell_size = 200
    hyper_size = 200
    embed_size = 128
    max_epoch = 200
    max_max_epoch = max_epoch
    keep_prob = 0.65
    zoneout_h = 0.9
    zoneout_c = 0.5
    lr_decay = 0.1
    batch_size = 128
    vocab_size = 50
    fast_layers = 2
    T_norm = 1.0
    use_zoneout = True
    use_layer_norm = True
    dataset = 'ptb'
class ptb_fs_rum_config(object):
    """PTB config: 2-layer fast-slow RUM ("fs-rum"), 700/1000 cell/hyper sizes."""
    cell = "fs-rum"
    init_scale = 0.01
    learning_rate = 0.002
    max_grad_norm = 1.0
    num_layers = 2
    num_steps = 150
    cell_size = 700
    hyper_size = 1000
    embed_size = 128
    max_epoch = 200
    max_max_epoch = max_epoch
    keep_prob = 0.65
    zoneout_h = 0.9
    zoneout_c = 0.5
    lr_decay = 0.1
    batch_size = 128
    vocab_size = 50
    fast_layers = 2
    T_norm = 1.0
    use_zoneout = True
    use_layer_norm = True
    dataset = 'ptb'
class ptb_fs_goru_config(object):
    """PTB config: 2-layer fast-slow GORU ("fs-goru"), 700/800 cell/hyper sizes."""
    cell = "fs-goru"
    init_scale = 0.01
    learning_rate = 0.002
    max_grad_norm = 1.0
    num_layers = 2
    num_steps = 150
    cell_size = 700
    hyper_size = 800
    embed_size = 128
    max_epoch = 200
    max_max_epoch = max_epoch
    keep_prob = 0.65
    zoneout_h = 0.9
    zoneout_c = 0.5
    lr_decay = 0.1
    batch_size = 128
    vocab_size = 50
    fast_layers = 2
    T_norm = 1.0
    use_zoneout = True
    use_layer_norm = True
    dataset = 'ptb'
class ptb_fs_eunn_config(object):
    """PTB config: 2-layer fast-slow EUNN ("fs-eunn"), 700/2000 cell/hyper sizes."""
    cell = "fs-eunn"
    init_scale = 0.01
    learning_rate = 0.002
    max_grad_norm = 1.0
    num_layers = 2
    num_steps = 150
    cell_size = 700
    hyper_size = 2000
    embed_size = 128
    max_epoch = 200
    max_max_epoch = max_epoch
    keep_prob = 0.65
    zoneout_h = 0.9
    zoneout_c = 0.5
    lr_decay = 0.1
    batch_size = 128
    vocab_size = 50
    fast_layers = 2
    T_norm = 1.0
    use_zoneout = True
    use_layer_norm = True
    dataset = 'ptb'
# NOTE(review): this is a byte-identical REDEFINITION of ptb_fs_goru_config
# declared earlier in this module; it silently shadows the first definition.
# Harmless because the bodies match, but one copy should be removed.
class ptb_fs_goru_config(object):
    """PTB config: 2-layer fast-slow GORU ("fs-goru") — duplicate definition."""
    cell = "fs-goru"
    init_scale = 0.01
    learning_rate = 0.002
    max_grad_norm = 1.0
    num_layers = 2
    num_steps = 150
    cell_size = 700
    hyper_size = 800
    embed_size = 128
    max_epoch = 200
    max_max_epoch = max_epoch
    keep_prob = 0.65
    zoneout_h = 0.9
    zoneout_c = 0.5
    lr_decay = 0.1
    batch_size = 128
    vocab_size = 50
    fast_layers = 2
    T_norm = 1.0
    use_zoneout = True
    use_layer_norm = True
    dataset = 'ptb'
class ptb_rum_double_config(object):
    """PTB config: 2-layer RUM, 1500-unit cells, T_norm=0.3.

    NOTE(review): no branch in get_config() returns this class, so it is
    unreachable through the public lookup — confirm whether it is still used.
    """
    cell = "rum"
    num_steps = 150
    learning_rate = 0.002
    T_norm = 0.3
    num_layers = 2
    init_scale = 0.01
    max_grad_norm = 1.0
    cell_size = 1500
    embed_size = 128
    max_epoch = 100
    max_max_epoch = max_epoch
    keep_prob = 0.65
    zoneout_h = 0.9
    lr_decay = 0.1
    batch_size = 128
    vocab_size = 50
    use_layer_norm = True
    use_zoneout = True
    dataset = 'ptb'
| 21.247727
| 52
| 0.61536
| 1,436
| 9,349
| 3.671309
| 0.066852
| 0.072838
| 0.02959
| 0.036419
| 0.902124
| 0.853945
| 0.802731
| 0.786229
| 0.786229
| 0.761191
| 0
| 0.098826
| 0.298642
| 9,349
| 439
| 53
| 21.296128
| 0.705201
| 0.021072
| 0
| 0.840731
| 0
| 0
| 0.040679
| 0.004961
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002611
| false
| 0
| 0
| 0
| 0.958225
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
2ab221161d835a6c1e55713f418809ebec49b5c5
| 78,437
|
py
|
Python
|
pysit/modeling/frequency_modeling.py
|
zfang-slim/PysitForPython3
|
dc60537b26018e28d92b7a956a2cf96775f0bdf9
|
[
"BSD-3-Clause"
] | null | null | null |
pysit/modeling/frequency_modeling.py
|
zfang-slim/PysitForPython3
|
dc60537b26018e28d92b7a956a2cf96775f0bdf9
|
[
"BSD-3-Clause"
] | null | null | null |
pysit/modeling/frequency_modeling.py
|
zfang-slim/PysitForPython3
|
dc60537b26018e28d92b7a956a2cf96775f0bdf9
|
[
"BSD-3-Clause"
] | 1
|
2020-06-13T07:13:07.000Z
|
2020-06-13T07:13:07.000Z
|
import itertools
from pysit.util.derivatives import build_derivative_matrix, build_permutation_matrix, build_heterogenous_matrices
from pysit.solvers.model_parameter import *
import sys
import copy
import numpy as np
from numpy.random import uniform
# Public API of this module; only the modeling class is exported.
__all__ = ['FrequencyModeling']
__docformat__ = "restructuredtext en"
class FrequencyModeling(object):
# read only class description
@property
def solver_type(self): return "frequency"
@property
def modeling_type(self): return "frequency"
    def __init__(self, solver):
        """Constructor for the FrequencyInversion class.
        Parameters
        ----------
        solver : pysit wave solver object
            A wave solver that inherits from pysit.solvers.WaveSolverBase

        Raises
        ------
        TypeError
            If the solver's supported equation dynamics do not match this
            class's solver_type ("frequency").
        """
        if self.solver_type == solver.supports['equation_dynamics']:
            self.solver = solver
        else:
            raise TypeError("Argument 'solver' type {1} does not match modeling solver type {0}.".format(
                self.solver_type, solver.supports['equation_dynamics']))
    def forward_model(self, shot, m0, frequencies, return_parameters=[]):
        """Applies the forward model to the model for the given solver.
        Parameters
        ----------
        shot : pysit.Shot
            Gives the source signal approximation for the right hand side.
        frequencies : list of 2-tuples
            2-tuple, first element is the frequency to use, second element the weight.
        return_parameters : list of {'wavefield', 'simdata', 'simdata_time', 'dWaveOp'}
        Returns
        -------
        retval : dict
            Dictionary whose keys are return_parameters that contains the specified data.
        Notes
        -----
        * u is used as the target field universally. It could be velocity potential, it could be displacement, it could be pressure.
        * uhat is used to generically refer to the DFT of u that is needed to compute the imaging condition.
        Forward model computes:
            For constant density: -m*(omega**2)*u - lap u = f, where m = 1.0/c**2
            For variable density: -m1*(omega**2)*u - div(m2 grad)u = f, where m1=1.0/kappa, m2=1.0/rho, and C = (kappa/rho)**0.5
        """
        # NOTE(review): mutable default return_parameters=[] is only read,
        # never mutated, so it is safe here (though None would be cleaner).
        # Local references
        solver = self.solver
        # this updates dt and the number of steps so that is appropriate for the current model
        solver.model_parameters = m0
        mesh = solver.mesh
        d = solver.domain
        source = shot.sources
        # Sanitize the input
        if not np.iterable(frequencies):
            frequencies = [frequencies]
        # Setup data storage for the forward modeled data
        if 'simdata' in return_parameters:
            simdata = dict()
        # Storage for the derivative of the propagation operator with respect to the model \frac{d\script{L}}{dm}
        if 'dWaveOp' in return_parameters:
            dWaveOp = dict()
        # Initialize the DFT components
        uhats = dict()
        # Step k = 0
        # p_0 is a zero array because if we assume the input signal is causal
        # and we assume that the initial system (i.e., p_(-2) and p_(-1)) is
        # uniformly zero, then the leapfrog scheme would compute that p_0 = 0 as
        # well. ukm1 is needed to compute the temporal derivative.
        solver_data = solver.SolverData()
        rhs = solver.WavefieldVector(mesh, dtype=solver.dtype)
        for nu in frequencies:
            # rhs wavefield vector is reused across frequencies via the
            # rhs_wavefieldvector argument.
            rhs = solver.build_rhs(mesh.pad_array(source.f(nu=nu)), rhs_wavefieldvector=rhs)
            # NOTE(review): solve's return value is unused; the solution is
            # read back from solver_data below.
            result = solver.solve(solver_data, rhs, nu)
            uhat = solver_data.k.primary_wavefield
            # Save the unpadded wavefield
            if 'wavefield' in return_parameters:
                uhats[nu] = mesh.unpad_array(uhat, copy=True)
            # Record the data at t_k
            if 'simdata' in return_parameters:
                simdata[nu] = shot.receivers.sample_data_from_array(mesh.unpad_array(uhat))
            # Save the derivative
            if 'dWaveOp' in return_parameters:
                dWaveOp[nu] = solver.compute_dWaveOp('frequency', uhat, nu)
        retval = dict()
        if 'dWaveOp' in return_parameters:
            retval['dWaveOp'] = dWaveOp
        if 'simdata' in return_parameters:
            retval['simdata'] = simdata
        if 'wavefield' in return_parameters:
            retval['wavefield'] = uhats
        return retval
    def forward_model_list(self, shot_list, m0, frequencies, return_parameters=[], **kwargs):
        """Applies the forward model to the model for the given solver and severals shots
        Parameters
        ----------
        shot_list : list of pysit.Shot
            Gives the source signal approximation for the right hand side.
        frequencies : list of 2-tuples
            2-tuple, first element is the frequency to use, second element the weight.
        return_parameters : list of {'wavefield', 'simdata', 'simdata_time', 'dWaveOp'}
        Returns
        -------
        retval : dict
            Dictionary whose keys are return_parameters that contains the specified data.
        Notes
        -----
        * u is used as the target field universally. It could be velocity potential, it could be displacement, it could be pressure.
        * uhat is used to generically refer to the DFT of u that is needed to compute the imaging condition.
        """
        # importing the Petsc libraries for the multiple rhs solve
        try:
            import petsc4py
            petsc4py.init(sys.argv)
            from petsc4py import PETSc
            from pysit.util.wrappers.petsc import PetscWrapper
        except ImportError:
            raise ImportError('petsc4py is not installed, please install it and try again')
        # 'petsc' kwarg selects the multi-RHS PETSc solve path below; absent
        # key is treated the same as petsc=None (per-shot solves).
        flag = 'petsc' in kwargs
        if flag == 0:
            petsc = None
        else:
            petsc = kwargs['petsc']
        # Local references
        solver = self.solver
        # this updates dt and the number of steps so that is appropriate for the current model
        solver.model_parameters = m0
        mesh = solver.mesh
        d = solver.domain
        # Sanitize the input
        if not np.iterable(frequencies):
            frequencies = [frequencies]
        # Setup data storage for the forward modeled data
        if 'simdata' in return_parameters:
            Simdata = dict()
        # Storage for the derivative of the propagation operator with respect to the model \frac{d\script{L}}{dm}
        if 'dWaveOp' in return_parameters:
            DWaveOp = dict()
        # Initialize the DFT components
        # Uhats is a dictionnary of dictionnary
        Uhats = dict()
        # Step k = 0
        # p_0 is a zero array because if we assume the input signal is causal
        # and we assume that the initial system (i.e., p_(-2) and p_(-1)) is
        # uniformly zero, then the leapfrog scheme would compute that p_0 = 0 as
        # well. ukm1 is needed to compute the temporal derivative.
        solver_data_list = list()
        for i in range(len(shot_list)):
            solver_data = solver.SolverData()
            solver_data_list.append(solver_data)
            Uhats[i] = dict()
            if 'simdata' in return_parameters:
                Simdata[i] = dict()
            if 'dWaveOp' in return_parameters:
                DWaveOp[i] = dict()
        rhs_list = list()
        for nu in frequencies:
            del rhs_list[:]
            rhs = solver.WavefieldVector(mesh, dtype=solver.dtype)
            for i in range(len(shot_list)):
                source = shot_list[i].sources
                rhs = solver.build_rhs(mesh.pad_array(source.f(nu=nu)), rhs_wavefieldvector=rhs)
                rhs_list.append(rhs.data.copy())
            # NOTE(review): identity test — the PETSc path runs only when the
            # caller passes exactly petsc=True; truthy non-True values fall
            # through to the per-shot loop. Confirm this is intended.
            if petsc is True:
                result = solver.solve_petsc(solver_data_list, rhs_list, nu, **kwargs)
            else:
                for i in range(len(shot_list)):
                    result = solver.solve(solver_data_list[i], rhs_list[i], nu)
            for i in range(len(shot_list)):
                uhat = solver_data_list[i].k.primary_wavefield
                # Save the unpadded wavefield
                if 'wavefield' in return_parameters:
                    Uhats[i][nu] = mesh.unpad_array(uhat, copy=True)
                # Record the data at t_k
                if 'simdata' in return_parameters:
                    Simdata[i][nu] = shot_list[i].receivers.sample_data_from_array(
                        mesh.unpad_array(uhat))
                # Save the derivative
                if 'dWaveOp' in return_parameters:
                    DWaveOp[i][nu] = solver.compute_dWaveOp('frequency', uhat, nu)
        retval = dict()
        if 'dWaveOp' in return_parameters:
            retval['dWaveOp'] = DWaveOp
        if 'simdata' in return_parameters:
            retval['simdata'] = Simdata
        if 'wavefield' in return_parameters:
            retval['wavefield'] = Uhats
        return retval
    def migrate_shot(self, shot, m0, operand_simdata, frequencies,
                     operand_dWaveOpAdj=None, operand_model=None,
                     frequency_weights=None,
                     dWaveOp=None,
                     adjointfield=None, dWaveOpAdj=None, wavefield=None):
        """Performs migration on a single shot.
        Parameters
        ----------
        shot : pysit.Shot
            Shot for which to compute migration.
        operand_simdata : ndarray
            Operand, i.e., b in F*b. This data is in TIME to properly compute the adjoint.
        frequencies : list of 2-tuples
            2-tuple, first element is the frequency to use, second element the weight.
        utt : list
            Imaging condition components from the forward model for each receiver in the shot.
        qs : list
            Optional return list allowing us to retrieve the adjoint field as desired.
        """
        # If the imaging component has not already been computed, compute it.
        prep_rp = list()
        if dWaveOp is None:
            prep_rp.append('dWaveOp')
            dWaveOp = dict()
        if len(prep_rp) > 0:
            retval = self.forward_model(shot, m0, frequencies, return_parameters=prep_rp)
            if 'dWaveOp' in prep_rp:
                dWaveOp = retval['dWaveOp']
        rp = ['imaging_condition']
        # adjointfield / dWaveOpAdj act as optional out-parameters: when the
        # caller passes a dict, it is filled per frequency below.
        if adjointfield is not None:
            rp.append('adjointfield')
        if dWaveOpAdj is not None:
            rp.append('dWaveOpAdj')
        rv = self.adjoint_model(shot, m0, operand_simdata, frequencies, operand_dWaveOpAdj=operand_dWaveOpAdj, operand_model=operand_model,
                                frequency_weights=frequency_weights, return_parameters=rp, dWaveOp=dWaveOp, wavefield=wavefield)
        # If the adjoint field is desired as output.
        for nu in frequencies:
            if adjointfield is not None:
                adjointfield[nu] = rv['adjointfield'][nu]
            if dWaveOpAdj is not None:
                dWaveOpAdj[nu] = rv['dWaveOpAdj'][nu]
        # Get the imaging condition part from the result, this is the migrated image.
        ic = rv['imaging_condition']
        return ic
    def migrate_shot_list(self, shots_list, m0, operand_simdata, frequencies,
                          operand_dWaveOpAdj=None, operand_model=None,
                          frequency_weights=None,
                          dWaveOp=None,
                          adjointfield=None, dWaveOpAdj=None, wavefield=None, **kwargs):
        """Performs migration a list of shot shot.
        Parameters
        ----------
        shots_list : list of pysit.Shot
            Shot for which to compute migration.
        operand_simdata : ndarray
            Operand, i.e., b in F*b. This data is in TIME to properly compute the adjoint.
        frequencies : list of 2-tuples
            2-tuple, first element is the frequency to use, second element the weight.
        utt : list
            Imaging condition components from the forward model for each receiver in the shot.
        qs : list
            Optional return list allowing us to retrieve the adjoint field as desired.
        """
        # If the imaging component has not already been computed, compute it.
        prep_rp = list()
        if dWaveOp is None:
            prep_rp.append('dWaveOp')
            dWaveOp = dict()
        if len(prep_rp) > 0:
            retval = self.forward_model_list(shots_list, m0, frequencies, return_parameters=prep_rp)
            if 'dWaveOp' in prep_rp:
                dWaveOp = retval['dWaveOp']
        rp = ['imaging_condition']
        # adjointfield / dWaveOpAdj act as optional out-parameters, filled
        # per frequency below when supplied by the caller.
        if adjointfield is not None:
            rp.append('adjointfield')
        if dWaveOpAdj is not None:
            rp.append('dWaveOpAdj')
        # kwargs (e.g. petsc=...) are forwarded to the multi-shot adjoint.
        rv = self.adjoint_model_list(shots_list, m0, operand_simdata, frequencies, operand_dWaveOpAdj=operand_dWaveOpAdj, operand_model=operand_model,
                                     frequency_weights=frequency_weights, return_parameters=rp, dWaveOp=dWaveOp, wavefield=wavefield, **kwargs)
        # If the adjoint field is desired as output.
        for nu in frequencies:
            if adjointfield is not None:
                adjointfield[nu] = rv['adjointfield'][nu]
            if dWaveOpAdj is not None:
                dWaveOpAdj[nu] = rv['dWaveOpAdj'][nu]
        # Get the imaging condition part from the result, this is the migrated image.
        ic = rv['imaging_condition']
        return ic
    def migrate_shots_extend(self, shots_list, m0, operand_simdata, frequencies,
                             max_sub_offset, h, operand_dWaveOpAdj=None, operand_model=None,
                             frequency_weights=None,
                             dWaveOp=None,
                             adjointfield=None, dWaveOpAdj=None, wavefield=None, **kwargs):
        """Performs migration a list of shot shot.
        Parameters
        ----------
        shots_list : list of pysit.Shot
            Shot for which to compute migration.
        m0 : background model, solver.ModelParameters (should be velocity [km/s])
        operand_simdata : ndarray
            Operand, i.e., b in F*b. This data is in TIME to properly compute the adjoint.
        frequencies : list of 2-tuples
            2-tuple, first element is the frequency to use, second element the weight.
        max_sub_offset : maximum subsurface offset for extended modeling
        h : subsurface offset interval
        utt : list
            Imaging condition components from the forward model for each receiver in the shot.
        qs : list
            Optional return list allowing us to retrieve the adjoint field as desired.
        output:
            ic: the extended migrated images
        """
        # NOTE(review): 'petsc' is extracted from kwargs here but never used
        # in this method, and kwargs are not forwarded to adjoint_model_extend
        # below — confirm whether that forwarding was intended.
        flag = 'petsc' in kwargs
        if flag == 0:
            petsc = None
        else:
            petsc = kwargs['petsc']
        # If the imaging component has not already been computed, compute it.
        prep_rp = list()
        if dWaveOp is None:
            prep_rp.append('dWaveOp')
            dWaveOp = dict()
        if len(prep_rp) > 0:
            retval = self.forward_model_list(
                shots_list, m0, frequencies, return_parameters=prep_rp)
            if 'dWaveOp' in prep_rp:
                dWaveOp = retval['dWaveOp']
        rp = ['imaging_condition']
        if adjointfield is not None:
            rp.append('adjointfield')
        if dWaveOpAdj is not None:
            rp.append('dWaveOpAdj')
        rv = self.adjoint_model_extend(shots_list, m0, operand_simdata, frequencies, max_sub_offset, h, operand_dWaveOpAdj=operand_dWaveOpAdj, operand_model=operand_model,
                                       frequency_weights=frequency_weights, return_parameters=rp, dWaveOp=dWaveOp, wavefield=wavefield)
        # rv = self.adjoint_model_extend(shots_list, m0, operand_simdata, frequencies, max_sub_offset, h, return_parameters=rp)
        # If the adjoint field is desired as output.
        for nu in frequencies:
            if adjointfield is not None:
                adjointfield[nu] = rv['adjointfield'][nu]
            if dWaveOpAdj is not None:
                dWaveOpAdj[nu] = rv['dWaveOpAdj'][nu]
        # Get the imaging condition part from the result, this is the migrated image.
        ic = rv['imaging_condition']
        return ic
def adjoint_model(self, shot, m0,
operand_simdata, frequencies,
operand_dWaveOpAdj=None, operand_model=None,
frequency_weights=None,
return_parameters=[],
dWaveOp=None, wavefield=None):
"""Solves for the adjoint field in frequency.
For constant density: -m*(omega**2)*q - lap q = resid, where m = 1.0/c**2
For variable density: -m1*(omega**2)*q - div(m2 grad)q = resid, where m1=1.0/kappa, m2=1.0/rho, and C = (kappa/rho)**0.5
Parameters
----------
shot : pysit.Shot
Gives the receiver model for the right hand side.
operand : ndarray
Right hand side, usually the residual.
frequencies : list of 2-tuples
2-tuple, first element is the frequency to use, second element the weight.
return_parameters : list of {'q', 'qhat', 'ic'}
dWaveOp : ndarray
Imaging component from the forward model (in frequency).
Returns
-------
retval : dict
Dictionary whose keys are return_parameters that contains the specified data.
Notes
-----
* q is the adjoint field.
* qhat is the DFT of oq at the specified frequencies
* ic is the imaging component. Because this function computes many of
the things required to compute the imaging condition, there is an option
to compute the imaging condition as we go. This should be used to save
computational effort. If the imaging condition is to be computed, the
optional argument utt must be present.
Imaging Condition for variable density has terms:
ic.m1 = omegas**2 * conj(u) * q
ic.m2 = conj(grad(u)) dot grad(q), summed over all shots and frequencies.
"""
# Sanitize the input
if not np.iterable(frequencies):
frequencies = [frequencies]
# Local references
solver = self.solver
solver.model_parameters = m0
mesh = solver.mesh
d = solver.domain
source = shot.sources
# Sanitize the input
if not np.iterable(frequencies):
frequencies = [frequencies]
qhats = dict()
if 'dWaveOpAdj' in return_parameters:
dWaveOpAdj = dict()
# If we are computing the imaging condition, ensure that all of the parts are there.
if dWaveOp is None and 'imaging_condition' in return_parameters:
raise ValueError('To compute imaging condition, forward component must be specified.')
if 'imaging_condition' in return_parameters:
ic = solver.model_parameters.perturbation(dtype=np.complex)
if frequency_weights is None:
frequency_weights = itertools.repeat(1.0)
freq_weights = {nu: weight for nu, weight in zip(frequencies, frequency_weights)}
# if we are dealing with variable density, we need to collect the gradient operators, D1 and D2. (note: D2 is the negative adjoint of the leftmost gradient used in our heterogenous laplacian)
if hasattr(m0, 'kappa') and hasattr(m0, 'rho'):
print("WARNING: Ian's operators are still used here even though the solver has changed. Gradient may be incorrect. These routines need to be updated.")
deltas = [mesh.x.delta, mesh.z.delta]
sh = mesh.shape(include_bc=True, as_grid=True)
D1, D2 = build_heterogenous_matrices(sh, deltas)
if operand_model is not None:
operand_model = operand_model.with_padding()
# Time-reversed wave solver
solver_data = solver.SolverData()
rhs = solver.WavefieldVector(mesh, dtype=solver.dtype)
for nu in frequencies:
# If we are dealing with variable density, we will need these values computed for the imagining condition in terms of m2.
if hasattr(m0, 'kappa') and hasattr(m0, 'rho'):
uhat = wavefield[nu]
uhat = mesh.pad_array(uhat)
D1u, D2u = np.conj(D1[0]*uhat), np.conj(D2[0]*uhat) # Need the conj. of grad (uhat)
# Compute the rhs array.
rhs_ = mesh.pad_array(shot.receivers.extend_data_to_array(
data=operand_simdata[nu])) # for primary adjoint equation
if (operand_dWaveOpAdj is not None) and (operand_model is not None):
dWaveOpAdj_nu = operand_dWaveOpAdj[nu]
rhs_ += reshape(operand_model*dWaveOpAdj_nu.reshape(operand_model.shape),
rhs_.shape) # for secondary adjoint equation
rhs = solver.build_rhs(rhs_, rhs_wavefieldvector=rhs)
np.conj(rhs.data, rhs.data)
result = solver.solve(solver_data, rhs, nu)
vhat = solver_data.k.primary_wavefield
# Compute the conjugate in place.
# After this operation, vhats _is_ conjugated, so its value does not
# match the mathematics. This is done to save computation, as computing
# the conjufation in place requires no further allocation. vhats should
# not be used beyond this point, so it is assigned to None.
qhat = np.conj(vhat, vhat)
if 'adjointfield' in return_parameters:
qhats[nu] = mesh.unpad_array(qhat, copy=True)
if 'dWaveOpAdj' in return_parameters:
dWaveOpAdj[nu] = solver.compute_dWaveOp('frequency', qhat, nu)
# If the imaging component needs to be computed, do it
if 'imaging_condition' in return_parameters:
weight = freq_weights[nu]
# if we are dealing with variable density, we compute 2 parts to the imagaing condition seperatly. Otherwise, if it is just constant density- we compute only 1.
if hasattr(m0, 'kappa') and hasattr(m0, 'rho'):
ic.rho -= weight*((D1u)*(D1[1]*qhat)+(D2u)*(D2[1]*qhat))
ic.kappa -= weight*qhat*np.conj(dWaveOp[nu])
else:
# note, no dnu here because the nus are not generally the complete set, so dnu makes little sense, otherwise dnu = 1./(nsteps*dt)
ic -= weight*qhat*np.conj(dWaveOp[nu])
retval = dict()
if 'adjointfield' in return_parameters:
retval['adjointfield'] = qhats
if 'dWaveOpAdj' in return_parameters:
retval['dWaveOpAdj'] = dWaveOpAdj
# If the imaging component needs to be computed, do it
if 'imaging_condition' in return_parameters:
# retval['imaging_condition'] = ic.without_padding() # Comment out by Zhilong, simply cutting the pml can not produce a correct gradient
# In order to make the gradient correct, you should add all the weights in the pml to the last layer of the computational grid
if m0.padded is True:
retval['imaging_condition'] = ic
else:
if solver.inv_padding_mode is 'add':
retval['imaging_condition'] = ic.add_padding()
else:
retval['imaging_condition'] = ic.without_padding()
return retval
def adjoint_model_list(self, shots_list, m0,
                       operand_simdata, frequencies,
                       operand_dWaveOpAdj=None, operand_model=None,
                       frequency_weights=None,
                       return_parameters=[],
                       dWaveOp=None, wavefield=None, **kwargs):
    """Solves for the adjoint field in frequency for a list of shots.

    For constant density: -m*(omega**2)*q - lap q = resid, where m = 1.0/c**2
    For variable density: -m1*(omega**2)*q - div(m2 grad)q = resid,
    where m1=1.0/kappa, m2=1.0/rho, and C = (kappa/rho)**0.5

    Parameters
    ----------
    shots_list : list of pysit.Shot
        Gives the receiver models for the right hand sides.
    m0 : solver.ModelParameters
        Background model.
    operand_simdata : list of dict
        Per-shot, per-frequency right hand sides, usually the residuals.
    frequencies : list
        Frequencies at which to solve (a scalar is also accepted).
    operand_dWaveOpAdj, operand_model : optional
        Extra source terms for the secondary adjoint equation.
    frequency_weights : iterable, optional
        Per-frequency weights; defaults to 1.0 for every frequency.
    return_parameters : list of {'adjointfield', 'dWaveOpAdj', 'imaging_condition'}
    dWaveOp : list of dict
        Forward wave-operator derivative; required for the imaging condition.
    wavefield : list of dict
        Forward wavefields; required for the variable-density imaging terms.

    Returns
    -------
    retval : dict
        Dictionary whose keys are return_parameters that contains the
        specified data (each entry indexed first by shot, then by frequency).

    Notes
    -----
    * q is the adjoint field; qhat is its DFT at the specified frequencies.
    * The imaging condition is accumulated on the fly to save computation.
    * Imaging condition for variable density has terms:
      ic.kappa ~ conj(dWaveOp) * q   and   ic.rho ~ conj(grad u) . grad q,
      summed over all shots and frequencies.
    """
    # Sanitize the input: allow a single scalar frequency.
    if not np.iterable(frequencies):
        frequencies = [frequencies]

    # Local references.
    solver = self.solver
    solver.model_parameters = m0
    mesh = solver.mesh

    Qhats = dict()
    if 'dWaveOpAdj' in return_parameters:
        DWaveOpAdj = dict()

    # If we are computing the imaging condition, ensure that all of the parts are there.
    if dWaveOp is None and 'imaging_condition' in return_parameters:
        raise ValueError('To compute imaging condition, forward component must be specified.')

    if 'imaging_condition' in return_parameters:
        Ic = dict()

    if frequency_weights is None:
        frequency_weights = itertools.repeat(1.0)
    freq_weights = {nu: weight for nu, weight in zip(frequencies, frequency_weights)}

    # Variable density requires the heterogeneous first-derivative operators
    # for the rho part of the imaging condition.
    if hasattr(m0, 'kappa') and hasattr(m0, 'rho'):
        deltas = [mesh.x.delta, mesh.z.delta]
        sh = mesh.shape(include_bc=True, as_grid=True)
        D1, D2 = build_heterogenous_matrices(sh, deltas)

    # Initialisation for the multiple-rhs resolution.
    solver_data_list = list()
    for i in range(len(shots_list)):
        solver_data_list.append(solver.SolverData())
        Qhats[i] = dict()
        if 'imaging_condition' in return_parameters:
            # BUG FIX: np.complex was removed from NumPy (>=1.24); the builtin
            # `complex` is the documented replacement and is equivalent.
            Ic[i] = solver.model_parameters.perturbation(dtype=complex)
        if 'dWaveOpAdj' in return_parameters:
            DWaveOpAdj[i] = dict()

    rhs_list = list()
    if operand_model is not None:
        operand_model = operand_model.with_padding()

    for nu in frequencies:
        del rhs_list[:]
        for i in range(len(shots_list)):
            rhs = solver.WavefieldVector(mesh, dtype=solver.dtype)
            rhs_ = mesh.pad_array(shots_list[i].receivers.extend_data_to_array(
                data=operand_simdata[i][nu]))
            if (operand_dWaveOpAdj is not None) and (operand_model is not None):
                dWaveOpAdj_nu = operand_dWaveOpAdj[nu]
                # For the secondary adjoint equation.
                # BUG FIX: `reshape` was called as a bare name (NameError);
                # it is the NumPy function, as used in adjoint_model_extend.
                rhs_ += np.reshape(operand_model *
                                   dWaveOpAdj_nu.reshape(operand_model.shape), rhs_.shape)
            rhs = solver.build_rhs(rhs_, rhs_wavefieldvector=rhs)
            # Conjugate the rhs in place: the adjoint solve uses the
            # conjugated residual.
            np.conj(rhs.data, rhs.data)
            rhs_list.append(rhs.data.copy())

        # Solve all shots at this frequency in one (PETSc) multi-rhs solve.
        solver.solve_petsc(solver_data_list, rhs_list, nu, **kwargs)

        for i in range(len(shots_list)):
            # Variable density needs conj(grad(uhat)) for the rho imaging term.
            if hasattr(m0, 'kappa') and hasattr(m0, 'rho'):
                uhat = mesh.pad_array(wavefield[i][nu])
                D1u, D2u = np.conj(D1[0]*uhat), np.conj(D2[0]*uhat)
            vhat = solver_data_list[i].k.primary_wavefield
            # Conjugate in place to save an allocation; vhat must not be
            # used beyond this point.
            qhat = np.conj(vhat, vhat)
            if 'adjointfield' in return_parameters:
                Qhats[i][nu] = mesh.unpad_array(qhat, copy=True)
            if 'dWaveOpAdj' in return_parameters:
                DWaveOpAdj[i][nu] = solver.compute_dWaveOp('frequency', qhat, nu)
            if 'imaging_condition' in return_parameters:
                weight = freq_weights[nu]
                if hasattr(m0, 'kappa') and hasattr(m0, 'rho'):
                    # Two imaging terms for variable density.
                    Ic[i].rho -= weight*((D1u)*(D1[1]*qhat)+(D2u)*(D2[1]*qhat))
                    Ic[i].kappa -= weight*qhat*np.conj(dWaveOp[i][nu])
                else:
                    # Note: no dnu here because the nus are not generally the
                    # complete set, so dnu makes little sense; otherwise
                    # dnu = 1./(nsteps*dt).
                    Ic[i] -= weight*qhat*np.conj(dWaveOp[i][nu])

    retval = dict()
    if 'adjointfield' in return_parameters:
        retval['adjointfield'] = Qhats
    if 'dWaveOpAdj' in return_parameters:
        retval['dWaveOpAdj'] = DWaveOpAdj
    if 'imaging_condition' in return_parameters:
        # BUG FIX: the unpadding loop previously ran unconditionally, raising
        # NameError whenever 'imaging_condition' was not requested; it is now
        # guarded by the same check as the rest of the Ic handling.
        for i in range(len(Ic)):
            Ic[i] = Ic[i].without_padding()
        retval['imaging_condition'] = Ic
    return retval
def adjoint_model_extend(self, shots_list, m0, operand_simdata,
                         frequencies, max_sub_offset, h,
                         operand_dWaveOpAdj=None, operand_model=None,
                         frequency_weights=None,
                         return_parameters=[],
                         dWaveOp=None, wavefield=None, **kwargs):
    """Solves for the extended (subsurface-offset) adjoint modeling in frequency.

    For constant density: -m*(omega**2)*q - lap q = resid, where m = 1.0/c**2
    For variable density: -m1*(omega**2)*q - div(m2 grad)q = resid,
    where m1=1.0/kappa, m2=1.0/rho, and C = (kappa/rho)**0.5

    Parameters
    ----------
    shots_list : list of pysit.Shot
        Gives the receiver models for the right hand sides.
    m0 : solver.ModelParameters
        Background model.
    operand_simdata : list of dict
        Per-shot, per-frequency right hand sides, usually the residuals.
    frequencies : list
        Frequencies at which to solve (a scalar is also accepted).
    max_sub_offset : float
        Maximum subsurface offset of the extended image.
    h : float
        Subsurface offset interval; nh = 2*int(max_sub_offset/h) + 1 offsets.
    operand_dWaveOpAdj, operand_model : optional
        Extra source terms for the secondary adjoint equation.
    frequency_weights : iterable, optional
        Per-frequency weights; defaults to 1.0 for every frequency.
    return_parameters : list of {'adjointfield', 'dWaveOpAdj', 'imaging_condition'}
    dWaveOp : list of dict
        Forward wave-operator derivative; required for the imaging condition.
    wavefield : list of dict
        Forward wavefields; required for the variable-density imaging terms.

    Returns
    -------
    retval : dict
        Dictionary whose keys are return_parameters that contains the
        specified data.

    Notes
    -----
    * q is the adjoint field; qhat is its DFT at the specified frequencies.
    * NOTE(review): when 'imaging_condition' is requested, the incoming
      **kwargs is rebound to {'dtype': 'complex'} and that dict is later
      forwarded to the solve calls — confirm this clobbering is intended.
    * NOTE(review): this method mutates the mesh's private _shapes and
      boundary-condition _n fields per offset and restores them from the deep
      copy mesh_ih; it is not safe to call concurrently on a shared mesh.
    """
    # Pull an optional 'petsc' flag out of kwargs to select the solve path.
    flag = 'petsc' in kwargs
    if flag == 0:
        petsc = None
    else:
        petsc = kwargs['petsc']
    # Sanitize the input: allow a single scalar frequency.
    if not np.iterable(frequencies):
        frequencies = [frequencies]
    # Local references.
    solver = self.solver
    solver.model_parameters = m0
    mesh = solver.mesh
    d = solver.domain
    # Number of subsurface offsets (symmetric about zero).
    nh = 2*int(max_sub_offset / h) + 1
    # Sanitize the input (duplicated from above in the original).
    if not np.iterable(frequencies):
        frequencies = [frequencies]
    Qhats = dict()
    if 'dWaveOpAdj' in return_parameters:
        DWaveOpAdj = dict()
    # If we are computing the imaging condition, ensure that all of the parts are there.
    if dWaveOp is None and 'imaging_condition' in return_parameters:
        raise ValueError(
            'To compute imaging condition, forward component must be specified.')
    if 'imaging_condition' in return_parameters:
        # NOTE(review): this rebinds the caller's **kwargs (see docstring).
        kwargs = {'dtype': 'complex'}
        Ic = ExtendedModelingParameter2D(mesh, max_sub_offset, h, **kwargs)
        # Scratch accumulator, one column per subsurface offset.
        Ic_data_tmp = np.zeros(Ic.sh_data, dtype='complex')
    if frequency_weights is None:
        frequency_weights = itertools.repeat(1.0)
    freq_weights = {nu: weight for nu, weight in zip(
        frequencies, frequency_weights)}
    # NOTE(review): sh_sub/dof_sub read Ic, which only exists when
    # 'imaging_condition' was requested — confirm callers always request it.
    sh_sub = Ic.sh_sub
    dof_sub = Ic.dof_sub
    # Create a fake mesh structure to perform intermediate padding and
    # unpadding operators (pristine copy used to restore mutated state).
    mesh_ih = copy.deepcopy(mesh)
    if hasattr(m0, 'kappa') and hasattr(m0, 'rho'):
        deltas = [mesh.x.delta, mesh.z.delta]
        sh = mesh.shape(include_bc=True, as_grid=True)
        D1, D2 = build_heterogenous_matrices(sh, deltas)
    solver_data_list = list()
    # Initialisation for the multiple-rhs resolution.
    for i in range(len(shots_list)):
        solver_data = solver.SolverData()
        solver_data_list.append(solver_data)
        Qhats[i] = dict()
        # if 'imaging_condition' in return_parameters:
        if 'dWaveOpAdj' in return_parameters:
            DWaveOpAdj[i] = dict()
    rhs_list = list()
    if operand_model is not None:
        operand_model = operand_model.with_padding()
    for nu in frequencies:
        # Reuse the rhs list across frequencies.
        del rhs_list[:]
        for i in range(len(shots_list)):
            rhs = solver.WavefieldVector(mesh, dtype=solver.dtype)
            rhs_ = mesh.pad_array(shots_list[i].receivers.extend_data_to_array(data=operand_simdata[i][nu]))
            if (operand_dWaveOpAdj is not None) and (operand_model is not None):
                dWaveOpAdj_nu = operand_dWaveOpAdj[nu]
                # For the secondary adjoint equation.
                rhs_ += np.reshape(operand_model * dWaveOpAdj_nu.reshape(operand_model.shape), rhs_.shape)
            rhs = solver.build_rhs(rhs_, rhs_wavefieldvector=rhs)
            # Conjugate the rhs in place for the adjoint solve.
            np.conj(rhs.data, rhs.data)
            rhs_list.append(rhs.data.copy())
        if petsc is True:
            # Multi-rhs PETSc solve for all shots at once.
            result = solver.solve_petsc(solver_data_list, rhs_list, nu, **kwargs)
        else:
            # Fall back to one solve per shot.
            for i in range(len(shots_list)):
                result = solver.solve(solver_data_list[i], rhs_list[i], nu, **kwargs)
        for i in range(len(shots_list)):
            # If we are dealing with variable density, we will need these values computed for the imaging condition in terms of m2.
            if hasattr(m0, 'kappa') and hasattr(m0, 'rho'):
                uhat = wavefield[i][nu]
                uhat = mesh.pad_array(uhat)
                # Need the conj. of grad(uhat).
                D1u, D2u = np.conj(D1[0]*uhat), np.conj(D2[0]*uhat)
            vhat = solver_data_list[i].k.primary_wavefield
            # Conjugate in place; vhat must not be used beyond this point.
            qhat = np.conj(vhat, vhat)
            if 'adjointfield' in return_parameters:
                Qhats[i][nu] = mesh.unpad_array(qhat, copy=True)
            if 'dWaveOpAdj' in return_parameters:
                DWaveOpAdj[i][nu] = solver.compute_dWaveOp('frequency', qhat, nu)
            if 'imaging_condition' in return_parameters:
                weight = freq_weights[nu]
                # The extended imaging for variational density is not implemented yet.
                if hasattr(m0, 'kappa') and hasattr(m0, 'rho'):
                    Ic[i].rho -= weight * \
                        ((D1u)*(D1[1]*qhat)+(D2u)*(D2[1]*qhat))
                    Ic[i].kappa -= weight*qhat*np.conj(dWaveOp[i][nu])
                else:
                    # Note: no dnu here because the nus are not generally the
                    # complete set, so dnu makes little sense; otherwise
                    # dnu = 1./(nsteps*dt).
                    for ih in range(0, nh):
                        # Per-offset boundary sizes for the shifted windows.
                        n_bcx_extend_u_ih = Ic.n_bcx_extend_u[ih, :]
                        n_bcx_extend_v_ih = Ic.n_bcx_extend_v[ih, :]
                        # Temporarily reshape the mesh to the sub-offset
                        # window so unpad_array extracts the shifted slice.
                        mesh._shapes[(False, False)] = (dof_sub, 1)
                        mesh._shapes[(False, True)] = sh_sub
                        mesh.parameters[0].lbc._n = n_bcx_extend_u_ih[0]
                        mesh.parameters[0].rbc._n = n_bcx_extend_u_ih[1]
                        u_tmp = mesh.unpad_array(np.conj(dWaveOp[i][nu]))
                        mesh.parameters[0].lbc._n = n_bcx_extend_v_ih[0]
                        mesh.parameters[0].rbc._n = n_bcx_extend_v_ih[1]
                        v_tmp = mesh.unpad_array(qhat)
                        # Restore the mesh to its original shape/boundaries.
                        mesh._shapes[(False, False)] = mesh_ih._shapes[(False, False)]
                        mesh._shapes[(False, True)] = mesh_ih._shapes[(False, True)]
                        mesh.parameters[0].lbc._n = mesh_ih.parameters[0]['lbc'].n
                        mesh.parameters[0].rbc._n = mesh_ih.parameters[0]['rbc'].n
                        Ic_data_tmp[:, ih] = (weight*v_tmp*u_tmp).reshape((-1,))
                    Ic.data -= Ic_data_tmp
    retval = dict()
    if 'adjointfield' in return_parameters:
        retval['adjointfield'] = Qhats
    if 'dWaveOpAdj' in return_parameters:
        retval['dWaveOpAdj'] = DWaveOpAdj
    # for i in range(len(Ic)):
    #     Ic[i] = Ic[i].without_padding()
    # Ic.data = np.real(Ic.data)
    if 'imaging_condition' in return_parameters:
        retval['imaging_condition'] = Ic
    return retval
def linear_forward_model(self, shot, m0, m1, frequencies, return_parameters=[], dWaveOp0=None):
    """Applies the linearized (Born) forward model for the given solver.

    Parameters
    ----------
    shot : pysit.Shot
        Gives the source signal approximation for the right hand side.
    m0 : solver.ModelParameters
        Background model.
    m1 : solver.ModelParameters
        Model perturbation.
    frequencies : list
        Frequencies at which to solve (a scalar is also accepted).
    return_parameters : list of {'dWaveOp0', 'wavefield1', 'dWaveOp1', 'simdata'}, optional
        Values to return.
    dWaveOp0 : dict, optional
        Background wave-operator derivative per frequency; computed on the
        fly (at potentially high expense) when omitted.

    Returns
    -------
    retval : dict
        Dictionary whose keys are return_parameters that contains the specified data.

    Notes
    -----
    * u1 is used as the target field universally. It could be velocity
      potential, displacement, or pressure.
    """
    # Accept a single scalar frequency as well as an iterable.
    if not np.iterable(frequencies):
        frequencies = [frequencies]

    solver = self.solver
    # This updates dt and the number of steps so that it is appropriate
    # for the current model.
    solver.model_parameters = m0
    mesh = solver.mesh
    d = solver.domain  # retained from original; not referenced below
    source = shot.sources

    # Padding mode 'edge' added by Zhilong; which mode to use is still
    # under discussion.
    m1_padded = m1.with_padding(padding_mode='edge')
    # m1_padded = m1.with_padding(padding_mode=None)

    # Per-frequency storage; only the requested entries end up in retval.
    u1hats = dict()
    simdata = dict()
    dWaveOp0ret = dict()
    dWaveOp1 = dict()

    if dWaveOp0 is None:
        solver_data_u0 = solver.SolverData()
    solver_data = solver.SolverData()
    rhs = solver.WavefieldVector(mesh, dtype=solver.dtype)

    for nu in frequencies:
        if dWaveOp0 is None:
            # No background derivative supplied: solve the background
            # problem at this frequency and derive it here.
            rhs = solver.build_rhs(mesh.pad_array(source.f(nu=nu)), rhs_wavefieldvector=rhs)
            solver.solve(solver_data_u0, rhs, nu)
            u0hat = solver_data_u0.k.primary_wavefield
            dWaveOp0_nu = solver.compute_dWaveOp('frequency', u0hat, nu)
        else:
            dWaveOp0_nu = dWaveOp0[nu]

        if 'dWaveOp0' in return_parameters:
            dWaveOp0ret[nu] = dWaveOp0_nu

        # Born source: the perturbation scattering off the background field;
        # build_rhs makes the vector the correct length.
        born_source = m1_padded*(-1*dWaveOp0_nu)
        rhs = solver.build_rhs(born_source, rhs_wavefieldvector=rhs)
        solver.solve(solver_data, rhs, nu)
        u1hat = solver_data.k.primary_wavefield

        if 'wavefield1' in return_parameters:
            u1hats[nu] = mesh.unpad_array(u1hat, copy=True)
        if 'dWaveOp1' in return_parameters:
            dWaveOp1[nu] = solver.compute_dWaveOp('frequency', u1hat, nu)
        if 'simdata' in return_parameters:
            simdata[nu] = shot.receivers.sample_data_from_array(mesh.unpad_array(u1hat))

    retval = dict()
    for key, store in (('dWaveOp0', dWaveOp0ret),
                       ('wavefield1', u1hats),
                       ('dWaveOp1', dWaveOp1),
                       ('simdata', simdata)):
        if key in return_parameters:
            retval[key] = store
    return retval
def linear_forward_model_extend(self, shots, m0, m1_extend, frequencies, max_sub_offset, h, return_parameters=[], DWaveOp0In=None):
    """Applies the extended linear forward model to the model for the given solver.

    Parameters
    ----------
    shots : a list of pysit.Shot
        Gives the source signal approximation for the right hand side.
    m0 : background model, solver.ModelParameters
    m1_extend : extended model perturbation, a structure of ExtendedModelingParameter2D
    max_sub_offset : maximum subsurface offset for extended modeling
    h : subsurface offset interval
    frequencies : list
        Frequencies at which to solve (a scalar is also accepted).
    return_parameters : list of {'dWaveOp0', 'wavefield1', 'dWaveOp1', 'simdata'}, optional
        Values to return.
    DWaveOp0In : list of dict, optional
        Per-shot background wave-operator derivative; computed on the fly
        (at potentially high expense) when omitted.

    Returns
    -------
    retval : dict
        Dictionary whose keys are return_parameters that contains the
        specified data, indexed first by shot, then by frequency.

    Notes
    -----
    * u1 is used as the target field universally. It could be velocity
      potential, displacement, or pressure.
    * NOTE(review): this method mutates the mesh's private _shapes and
      boundary-condition _n fields per subsurface offset and restores them
      from the deep copy mesh_ih; not safe for concurrent use of the mesh.
    """
    # Sanitize the input: allow a single scalar frequency.
    if not np.iterable(frequencies):
        frequencies = [frequencies]
    # Local references.
    solver = self.solver
    # This updates dt and the number of steps appropriately for the current model.
    solver.model_parameters = m0
    mesh = solver.mesh
    d = solver.domain
    # Number of subsurface offsets (symmetric about zero).
    nh = 2*int(max_sub_offset / h) + 1
    # # added the padding_mode by Zhilong, still needs to discuss which padding mode to use
    # m1_padded = m1.with_padding(padding_mode='edge')
    # Storage for the field.
    u1hats = dict()
    # Setup data storage for the forward modeled data.
    if 'simdata' in return_parameters:
        Simdata = dict()
    # Storage for the derivatives of the background wave operator.
    if 'dWaveOp0' in return_parameters:
        DWaveOp0ret = dict()
    # Storage for the derivatives of the perturbed wave operator.
    if 'dWaveOp1' in return_parameters:
        DWaveOp1 = dict()
    if 'wavefield1' in return_parameters:
        U1hats = dict()
    if DWaveOp0In is None:
        solver_data_u0 = solver.SolverData()
    solver_data = solver.SolverData()
    rhs = solver.WavefieldVector(mesh, dtype=solver.dtype)
    rhslin = solver.WavefieldVector(mesh, dtype=solver.dtype)
    sh_sub = m1_extend.sh_sub
    dof_sub = m1_extend.dof_sub
    # Create a fake mesh structure to perform intermediate padding and
    # unpadding operators (pristine copy used to restore mutated state).
    mesh_ih = copy.deepcopy(mesh)
    for i in range(len(shots)):
        shot = shots[i]
        source = shot.sources
        if 'simdata' in return_parameters:
            Simdata[i] = dict()
        if 'dWaveOp0' in return_parameters:
            DWaveOp0ret[i] = dict()
        if 'dWaveOp1' in return_parameters:
            DWaveOp1[i] = dict()
        if 'wavefield1' in return_parameters:
            U1hats[i] = dict()
        for nu in frequencies:
            # m1_padded = m1.with_padding(padding_mode='edge')
            if DWaveOp0In is None:
                # No background derivative supplied: solve the background
                # problem at this frequency and derive it.
                rhs = solver.build_rhs(mesh.pad_array(source.f(nu=nu)), rhs_wavefieldvector=rhs)
                solver.solve(solver_data_u0, rhs, nu)
                u0hat = solver_data_u0.k.primary_wavefield
                dWaveOp0_nu = solver.compute_dWaveOp('frequency', u0hat, nu)
            else:
                dWaveOp0_nu = DWaveOp0In[i][nu]
            if 'dWaveOp0' in return_parameters:
                DWaveOp0ret[i][nu] = dWaveOp0_nu
            # Accumulate the Born source over all subsurface offsets.
            rhslin.data = 0.0
            for ih in range(0, nh):
                # Perturbation slice for this offset, as a column vector.
                m1_ih = m1_extend.data[:, ih]
                m1_ih = m1_ih.reshape((m1_ih.size, 1))
                # Per-offset boundary sizes for the shifted windows.
                n_bcx_extend_u_ih = m1_extend.n_bcx_extend_u[ih, :]
                n_bcx_extend_v_ih = m1_extend.n_bcx_extend_v[ih, :]
                # Temporarily reshape the mesh to the sub-offset window so
                # unpad_array/pad_array operate on the shifted slice.
                mesh._shapes[(False, False)] = (dof_sub, 1)
                mesh._shapes[(False, True)] = sh_sub
                mesh.parameters[0].lbc._n = n_bcx_extend_u_ih[0]
                mesh.parameters[0].rbc._n = n_bcx_extend_u_ih[1]
                rhs_ = m1_ih * (mesh.unpad_array(-1*dWaveOp0_nu))
                mesh.parameters[0].lbc._n = n_bcx_extend_v_ih[0]
                mesh.parameters[0].rbc._n = n_bcx_extend_v_ih[1]
                rhs_ = mesh.pad_array(rhs_)
                # Restore the mesh to its original shape/boundaries.
                mesh._shapes[(False, False)] = mesh_ih._shapes[(False, False)]
                mesh._shapes[(False, True)] = mesh_ih._shapes[(False, True)]
                mesh.parameters[0].lbc._n = mesh_ih.parameters[0]['lbc'].n
                mesh.parameters[0].rbc._n = mesh_ih.parameters[0]['rbc'].n
                # rhs_ = m1_padded*(-1*dWaveOp0_nu)
                # Make the rhs vector the correct length.
                rhs_tmp = solver.build_rhs(rhs_, rhs_wavefieldvector=rhs)
                rhslin.data = rhslin.data + rhs_tmp.data
            solver.solve(solver_data, rhslin, nu)
            u1hat = solver_data.k.primary_wavefield
            # Store the wavefield.
            if 'wavefield1' in return_parameters:
                U1hats[i][nu] = mesh.unpad_array(u1hat, copy=True)
            # Compute the derivative.
            if 'dWaveOp1' in return_parameters:
                DWaveOp1[i][nu] = solver.compute_dWaveOp('frequency', u1hat, nu)
            # Extract the data at the receivers.
            if 'simdata' in return_parameters:
                Simdata[i][nu] = shot.receivers.sample_data_from_array(mesh.unpad_array(u1hat))
    retval = dict()
    if 'dWaveOp0' in return_parameters:
        retval['dWaveOp0'] = DWaveOp0ret
    if 'wavefield1' in return_parameters:
        retval['wavefield1'] = U1hats
    if 'dWaveOp1' in return_parameters:
        retval['dWaveOp1'] = DWaveOp1
    if 'simdata' in return_parameters:
        retval['simdata'] = Simdata
    return retval
def linear_forward_model_kappa(self, shot, m0, m1, frequencies, return_parameters=[], dWaveOp0=None, wavefield=None):
    """Applies the forward model linearized with respect to a perturbation of kappa.

    Parameters
    ----------
    shot : pysit.Shot
        Gives the source signal approximation for the right hand side.
    m0 : solver.ModelParameters
        Background model.
    m1 : solver.ModelParameters
        Model perturbation; only its kappa component is used here.
    frequencies : list
        Frequencies at which to solve (a scalar is also accepted).
    return_parameters : list of {'dWaveOp0', 'wavefield1', 'dWaveOp1', 'simdata'}, optional
        Values to return.
    dWaveOp0 : dict, optional
        Background wave-operator derivative per frequency; computed on the
        fly (at potentially high expense) when omitted.
    wavefield : dict, optional
        Unused in this method; kept for signature parity with the rho variant.

    Returns
    -------
    retval : dict
        Dictionary whose keys are return_parameters that contains the specified data.
    """
    # Accept a single scalar frequency as well as an iterable.
    if not np.iterable(frequencies):
        frequencies = [frequencies]

    solver = self.solver
    # This updates dt and the number of steps appropriately for the
    # current model.
    solver.model_parameters = m0
    mesh = solver.mesh
    d = solver.domain  # retained from original; not referenced below
    source = shot.sources

    # The perturbation enters through 1/kappa, padded onto the solver grid.
    model_1 = mesh.pad_array(1.0/m1.kappa)

    # Per-frequency storage; only the requested entries end up in retval.
    u1hats = dict()
    simdata = dict()
    dWaveOp0ret = dict()
    dWaveOp1 = dict()

    if dWaveOp0 is None:
        solver_data_u0 = solver.SolverData()
    solver_data = solver.SolverData()
    rhs = solver.WavefieldVector(mesh, dtype=solver.dtype)

    for nu in frequencies:
        if dWaveOp0 is None:
            # No background derivative supplied: solve the background
            # problem at this frequency and derive it here.
            rhs = solver.build_rhs(mesh.pad_array(source.f(nu=nu)), rhs_wavefieldvector=rhs)
            solver.solve(solver_data_u0, rhs, nu)
            u0hat = solver_data_u0.k.primary_wavefield
            dWaveOp0_nu = solver.compute_dWaveOp('frequency', u0hat, nu)
        else:
            dWaveOp0_nu = dWaveOp0[nu]

        if 'dWaveOp0' in return_parameters:
            dWaveOp0ret[nu] = dWaveOp0_nu

        # Born source for the kappa perturbation; build_rhs makes the
        # vector the correct length.
        rhs = solver.build_rhs(model_1*(-1*dWaveOp0_nu), rhs_wavefieldvector=rhs)
        solver.solve(solver_data, rhs, nu)
        u1hat = solver_data.k.primary_wavefield

        if 'wavefield1' in return_parameters:
            u1hats[nu] = mesh.unpad_array(u1hat, copy=True)
        if 'dWaveOp1' in return_parameters:
            dWaveOp1[nu] = solver.compute_dWaveOp('frequency', u1hat, nu)
        if 'simdata' in return_parameters:
            simdata[nu] = shot.receivers.sample_data_from_array(mesh.unpad_array(u1hat))

    retval = dict()
    for key, store in (('dWaveOp0', dWaveOp0ret),
                       ('wavefield1', u1hats),
                       ('dWaveOp1', dWaveOp1),
                       ('simdata', simdata)):
        if key in return_parameters:
            retval[key] = store
    return retval
def linear_forward_model_rho(self, shot, m0, m1, frequencies, return_parameters=[], dWaveOp0=None, wavefield=None):
    """Applies the forward model linearized with respect to a perturbation of rho.

    Parameters
    ----------
    shot : pysit.Shot
        Gives the source signal approximation for the right hand side.
    m0 : solver.ModelParameters
        Background model.
    m1 : solver.ModelParameters
        Model perturbation; only its rho component is used here.
    frequencies : list
        Frequencies at which to solve (a scalar is also accepted).
    return_parameters : list of {'dWaveOp0', 'wavefield1', 'dWaveOp1', 'simdata'}, optional
        Values to return.
    dWaveOp0 : dict, optional
        Background wave-operator derivative per frequency; computed on the
        fly (at potentially high expense) when omitted.
    wavefield : dict
        Background wavefields per frequency; the Born source is the
        heterogeneous Laplacian applied to these.

    Returns
    -------
    retval : dict
        Dictionary whose keys are return_parameters that contains the specified data.
    """
    # Accept a single scalar frequency as well as an iterable.
    if not np.iterable(frequencies):
        frequencies = [frequencies]

    solver = self.solver
    # This updates dt and the number of steps appropriately for the
    # current model.
    solver.model_parameters = m0
    mesh = solver.mesh
    sh = mesh.shape(include_bc=True, as_grid=True)
    d = solver.domain  # retained from original; not referenced below
    source = shot.sources

    # The perturbation enters through 1/rho, padded onto the solver grid.
    model_2 = mesh.pad_array(1.0/m1.rho)

    print("WARNING: Ian's operators are still used here even though the solver has changed. These tests need to be updated.")
    # Heterogeneous Laplacian weighted by the rho perturbation.
    Lap = build_heterogenous_matrices(
        sh, [mesh.x.delta, mesh.z.delta], model_2.reshape(-1,), rp={'laplacian': True})

    # Per-frequency storage; only the requested entries end up in retval.
    u1hats = dict()
    simdata = dict()
    dWaveOp0ret = dict()
    dWaveOp1 = dict()

    if dWaveOp0 is None:
        solver_data_u0 = solver.SolverData()
    solver_data = solver.SolverData()
    rhs = solver.WavefieldVector(mesh, dtype=solver.dtype)

    for nu in frequencies:
        u0_hat = mesh.pad_array(wavefield[nu])
        if dWaveOp0 is None:
            # No background derivative supplied: solve the background
            # problem at this frequency and derive it here.
            rhs = solver.build_rhs(mesh.pad_array(source.f(nu=nu)), rhs_wavefieldvector=rhs)
            solver.solve(solver_data_u0, rhs, nu)
            u0hat = solver_data_u0.k.primary_wavefield
            dWaveOp0_nu = solver.compute_dWaveOp('frequency', u0hat, nu)
        else:
            dWaveOp0_nu = dWaveOp0[nu]

        if 'dWaveOp0' in return_parameters:
            dWaveOp0ret[nu] = dWaveOp0_nu

        # Born source for the rho perturbation; build_rhs makes the vector
        # the correct length.
        rhs = solver.build_rhs(Lap*u0_hat, rhs_wavefieldvector=rhs)
        solver.solve(solver_data, rhs, nu)
        u1hat = solver_data.k.primary_wavefield

        if 'wavefield1' in return_parameters:
            u1hats[nu] = mesh.unpad_array(u1hat, copy=True)
        if 'dWaveOp1' in return_parameters:
            dWaveOp1[nu] = solver.compute_dWaveOp('frequency', u1hat, nu)
        if 'simdata' in return_parameters:
            simdata[nu] = shot.receivers.sample_data_from_array(mesh.unpad_array(u1hat))

    retval = dict()
    for key, store in (('dWaveOp0', dWaveOp0ret),
                       ('wavefield1', u1hats),
                       ('dWaveOp1', dWaveOp1),
                       ('simdata', simdata)):
        if key in return_parameters:
            retval[key] = store
    return retval
def adjoint_test_kappa():
    """Adjoint (dot-product) test for the kappa linearization.

    Builds a small variable-density model, computes linearized data for a
    random kappa perturbation, and compares the data-space inner product
    <F v, d> against the model-space inner product <v, F* d>. The two
    printed values should agree (their printed difference should be small)
    if the linearized forward and adjoint operators are consistent.
    """
    # if __name__ == '__main__':
    # from pysit import *
    import numpy as np
    import matplotlib.pyplot as plt  # NOTE(review): imported but never used
    from numpy.random import uniform
    from pysit import PML, Dirichlet, RectangularDomain, CartesianMesh, PointSource, ReceiverSet, Shot, ConstantDensityAcousticWave, VariableDensityHelmholtz, generate_seismic_data, PointReceiver, RickerWavelet
    from pysit.gallery.horizontal_reflector import horizontal_reflector
    # Define a PML-bounded rectangular domain and mesh.
    bc = PML(0.3, 100, ftype='quadratic')
    # bc = Dirichlet()
    x_config = (0.1, 1.0, bc, bc)
    z_config = (0.1, 0.8, bc, bc)
    d = RectangularDomain(x_config, z_config)
    m = CartesianMesh(d, 70, 90)
    # Generate true wave speed
    # (M = C^-2 - C0^-2)
    C, C0, m, d = horizontal_reflector(m)
    # Variable-density model built from the reflector speed:
    # kappa = w*C, rho = C/w (presumably so C = (kappa/rho)**0.5 — verify).
    w = 1.3
    M = [w*C, C/w]
    M0 = [C0, C0]  # NOTE(review): defined but never used below
    # Set up shots.
    Nshots = 1
    shots = []
    xmin = d.x.lbound
    xmax = d.x.rbound
    nx = m.x.n
    zmin = d.z.lbound
    zmax = d.z.rbound
    point_approx = 'delta'
    for i in range(Nshots):
        # Define source location and type.
        source = PointSource(m, (.188888, 0.18888), RickerWavelet(10.0), approximation=point_approx)
        # Define a horizontal line of receivers.
        zpos = zmin + (1./9.)*zmax
        xpos = np.linspace(xmin, xmax, nx)
        receivers = ReceiverSet(m, [PointReceiver(m, (x, zpos)) for x in xpos])
        # Create and store the shot.
        shot = Shot(source, receivers)
        shots.append(shot)
    # Define and configure the wave solver.
    # trange=(0.,3.0)
    freqs = [3.0, 5.0, 7.0]
    solver = VariableDensityHelmholtz(m,
                                      model_parameters={'kappa': M[0], 'rho': M[1]},
                                      spatial_shifted_differences=False,
                                      spatial_accuracy_order=2)
    # Generate synthetic seismic data.
    print('Generating data...')
    base_model = solver.ModelParameters(m, {'kappa': M[0], 'rho': M[1]})
    generate_seismic_data(shots, solver, base_model, frequencies=freqs)
    tools = FrequencyModeling(solver)
    m0 = solver.ModelParameters(m, {'kappa': M[0], 'rho': M[1]})
    np.random.seed(0)  # reproducible random perturbation
    m1 = m0.perturbation()
    v = uniform(0.5, 1.5, len(m0.kappa)).reshape((len(m0.kappa), 1))
    # v is a perturbation of model 1/kappa (which we have declared as m1).
    # Thus, kappa is 1/v.
    m1.kappa += 1.0/v
    # freqs = np.linspace(3,20,20)
    # Forward model: background data, wavefield, and wave-operator derivative
    # (uses `shot` from the last loop iteration; Nshots == 1 here).
    fwdret = tools.forward_model(shot, m0, freqs, ['wavefield', 'dWaveOp', 'simdata'])
    data = fwdret['simdata']
    dWaveOp0 = fwdret['dWaveOp']
    u0hat = fwdret['wavefield']
    # data -= shot.receivers.interpolate_data(solver.ts())
    # data *= -1
    # for nu in freqs:
    # data[nu] += np.random.rand(*data[nu].shape)
    linfwdret = tools.linear_forward_model_kappa(shot, m0, m1, freqs, ['simdata', 'wavefield1'])
    lindata = linfwdret['simdata']
    u1hat = linfwdret['wavefield1'][freqs[0]]  # NOTE(review): unused below
    adjret = tools.adjoint_model(shot, m0, data, freqs, return_parameters=[
        'imaging_condition', 'adjointfield'], dWaveOp=dWaveOp0, wavefield=u0hat)
    qhat = adjret['adjointfield'][freqs[0]]  # NOTE(review): unused below
    adjmodel = adjret['imaging_condition'].kappa
    # adjret2 = tools.adjoint_model(shot, m0, lindata_time, freqs, return_parameters=['imaging_condition', 'adjointfield'], dWaveOp=dWaveOp0)
    ## qhat = adjret['adjointfield'][freqs[0]]
    # adjmodel2 = adjret2['imaging_condition'].view(np.ndarray)
    # Data-space inner product <F v, d>, summed over frequencies.
    temp_data_prod = 0.0
    for nu in freqs:
        temp_data_prod += np.dot(lindata[nu].reshape(data[nu].shape).T, np.conj(data[nu]))
    print("data space: ", temp_data_prod.squeeze())
    # Model-space inner product <v, F* d>, scaled by the cell volume.
    print("model space: ", np.dot(v.T, np.conj(adjmodel)).squeeze()*np.prod(m.deltas))
    print("their diff: ", np.dot(v.T, np.conj(adjmodel)).squeeze()
          * np.prod(m.deltas) - temp_data_prod.squeeze())
def adjoint_test_rho():
    """Adjoint (dot-product) test for the rho linearization.

    Builds a small variable-density model, computes linearized data for a
    random rho perturbation, and compares the data-space inner product
    <F v, d> against the model-space inner product <v, F* d>. The two
    printed values should agree (their printed difference should be small)
    if the linearized forward and adjoint operators are consistent.
    """
    # if __name__ == '__main__':
    # from pysit import *
    import numpy as np
    import matplotlib.pyplot as plt  # NOTE(review): imported but never used
    from numpy.random import uniform
    from pysit import PML, Dirichlet, RectangularDomain, CartesianMesh, PointSource, ReceiverSet, Shot, ConstantDensityAcousticWave, VariableDensityHelmholtz, generate_seismic_data, PointReceiver, RickerWavelet
    from pysit.gallery.horizontal_reflector import horizontal_reflector
    # Define a PML-bounded rectangular domain and mesh.
    bc = PML(0.3, 100, ftype='quadratic')
    # bc = Dirichlet()
    x_config = (0.1, 1.0, bc, bc)
    z_config = (0.1, 0.8, bc, bc)
    d = RectangularDomain(x_config, z_config)
    m = CartesianMesh(d, 70, 90)
    # Generate true wave speed
    # (M = C^-2 - C0^-2)
    C, C0, m, d = horizontal_reflector(m)
    # Variable-density model built from the reflector speed:
    # kappa = w*C, rho = C/w (presumably so C = (kappa/rho)**0.5 — verify).
    w = 1.3
    M = [w*C, C/w]
    M0 = [C0, C0]  # NOTE(review): defined but never used below
    # Set up shots.
    Nshots = 1
    shots = []
    xmin = d.x.lbound
    xmax = d.x.rbound
    nx = m.x.n
    zmin = d.z.lbound
    zmax = d.z.rbound
    point_approx = 'delta'
    for i in range(Nshots):
        # Define source location and type.
        source = PointSource(m, (.188888, 0.18888), RickerWavelet(10.0), approximation=point_approx)
        # Define a horizontal line of receivers.
        zpos = zmin + (1./9.)*zmax
        xpos = np.linspace(xmin, xmax, nx)
        receivers = ReceiverSet(m, [PointReceiver(m, (x, zpos)) for x in xpos])
        # Create and store the shot.
        shot = Shot(source, receivers)
        shots.append(shot)
    # Define and configure the wave solver.
    # trange=(0.,3.0)
    freqs = [3.0, 5.0, 7.0]
    solver = VariableDensityHelmholtz(m,
                                      model_parameters={'kappa': M[0], 'rho': M[1]},
                                      spatial_shifted_differences=False,
                                      spatial_accuracy_order=2)
    # Generate synthetic seismic data.
    print('Generating data...')
    base_model = solver.ModelParameters(m, {'kappa': M[0], 'rho': M[1]})
    generate_seismic_data(shots, solver, base_model, frequencies=freqs)
    tools = FrequencyModeling(solver)
    m0 = solver.ModelParameters(m, {'kappa': M[0], 'rho': M[1]})
    np.random.seed(0)  # reproducible random perturbation
    m1 = m0.perturbation()
    # v is a perturbation of model 1/rho (which we have declared as m1).
    # Thus, rho is 1/v.
    v = uniform(0.5, 1.5, len(m0.rho)).reshape((len(m0.rho), 1))
    m1.rho += 1.0/v
    # freqs = np.linspace(3,20,20)
    # Forward model: background data, wavefield, and wave-operator derivative
    # (uses `shot` from the last loop iteration; Nshots == 1 here).
    fwdret = tools.forward_model(shot, m0, freqs, ['wavefield', 'dWaveOp', 'simdata'])
    data = fwdret['simdata']
    dWaveOp0 = fwdret['dWaveOp']
    u0hat = fwdret['wavefield']
    # data -= shot.receivers.interpolate_data(solver.ts())
    # data *= -1
    # for nu in freqs:
    # data[nu] += np.random.rand(*data[nu].shape)
    linfwdret = tools.linear_forward_model_rho(
        shot, m0, m1, freqs, ['simdata', 'wavefield1'], wavefield=u0hat)
    lindata = linfwdret['simdata']
    #u1hat = linfwdret['wavefield1'][freqs[0]]
    adjret = tools.adjoint_model(shot, m0, data, freqs, return_parameters=[
        'imaging_condition', 'adjointfield'], dWaveOp=dWaveOp0, wavefield=u0hat)
    qhat = adjret['adjointfield'][freqs[0]]  # NOTE(review): unused below
    adjmodel = adjret['imaging_condition'].rho
    # adjret2 = tools.adjoint_model(shot, m0, lindata_time, freqs, return_parameters=['imaging_condition', 'adjointfield'], dWaveOp=dWaveOp0)
    ## qhat = adjret['adjointfield'][freqs[0]]
    # adjmodel2 = adjret2['imaging_condition'].view(np.ndarray)
    # Data-space inner product <F v, d>, summed over frequencies.
    temp_data_prod = 0.0
    for nu in freqs:
        temp_data_prod += np.dot(lindata[nu].reshape(data[nu].shape).T, np.conj(data[nu]))
    print("data space: ", temp_data_prod.squeeze())
    # Model-space inner product <v, F* d>, scaled by the cell volume.
    print("model space: ", np.dot(v.T, np.conj(adjmodel)).squeeze()*np.prod(m.deltas))
    print("their diff: ", np.dot(v.T, np.conj(adjmodel)).squeeze() * np.prod(m.deltas) - temp_data_prod.squeeze())
def adjoint_test():
"""Dot-product (adjoint) test for the constant-density Helmholtz modeling operators.

Builds a PML-bounded rectangular domain with a horizontal reflector, generates
synthetic time-domain data, then checks the adjoint identity
<F m1, d> (data space) vs <m1, F* d> (model space) for a random model
perturbation m1, printing both inner products and their difference.

NOTE(review): relies on ``FrequencyModeling`` being imported at module
level -- it is not imported inside this function; verify against the file
header (not visible in this chunk).
"""
# if __name__ == '__main__':
# from pysit import *
import numpy as np
import matplotlib.pyplot as plt
from pysit import PML, Dirichlet, RectangularDomain, CartesianMesh, PointSource, ReceiverSet, Shot, ConstantDensityAcousticWave, ConstantDensityHelmholtz, generate_seismic_data, PointReceiver, RickerWavelet
from pysit.gallery import horizontal_reflector
# Define Domain
bc = PML(0.3, 100, ftype='quadratic')
# bc = Dirichlet()
x_config = (0.1, 1.0, bc, bc)
z_config = (0.1, 0.8, bc, bc)
d = RectangularDomain(x_config, z_config)
m = CartesianMesh(d, 90, 70)
# Generate true wave speed
# (M = C^-2 - C0^-2)
# C0, C = horizontal_reflector(m)
C0, C, m, d = horizontal_reflector(m)
# Set up shots
Nshots = 1
shots = []
xmin = d.x.lbound
xmax = d.x.rbound
nx = m.x.n
zmin = d.z.lbound
zmax = d.z.rbound
point_approx = 'delta'
for i in range(Nshots):
# Define source location and type
source = PointSource(m, (.188888, 0.18888), RickerWavelet(10.0), approximation=point_approx)
# Define set of receivers
zpos = zmin + (1./9.)*zmax
xpos = np.linspace(xmin, xmax, nx)
receivers = ReceiverSet(m, [PointReceiver(m, (x, zpos)) for x in xpos])
# Create and store the shot
shot = Shot(source, receivers)
shots.append(shot)
# Define and configure the wave solver
trange = (0., 3.0)
solver = ConstantDensityAcousticWave(m,
formulation='scalar',
model_parameters={'C': C},
spatial_accuracy_order=4,
trange=trange,
time_accuracy_order=6)
# Generate synthetic Seismic data
print('Generating data...')
base_model = solver.ModelParameters(m, {'C': C})
generate_seismic_data(shots, solver, base_model)
# Frequency-domain solver on the background model C0 for the adjoint test.
solver_frequency = ConstantDensityHelmholtz(m,
model_parameters={'C': C0},
spatial_shifted_differences=True,
spatial_accuracy_order=4)
tools = FrequencyModeling(solver_frequency)
m0 = solver_frequency.ModelParameters(m, {'C': C0})
# Seed so the random perturbation below is reproducible.
np.random.seed(0)
m1 = m0.perturbation()
# m1 += M
m1 += np.random.rand(*m1.data.shape)
# m1 += np.ones(m1.data.shape)
# freqs = [10.0, 10.5, 10.123334145252]
freqs = [10.0]
# freqs = np.linspace(3,20,20)
fwdret = tools.forward_model(shot, m0, freqs, ['wavefield', 'dWaveOp', 'simdata'])
data = fwdret['simdata']
dWaveOp0 = fwdret['dWaveOp']
u0hat = fwdret['wavefield'][freqs[0]]
# data -= shot.receivers.interpolate_data(solver.ts())
# data *= -1
# for nu in freqs:
# data[nu] += np.random.rand(*data[nu].shape)
linfwdret = tools.linear_forward_model(shot, m0, m1, freqs, ['simdata', 'wavefield1'])
lindata = linfwdret['simdata']
u1hat = linfwdret['wavefield1'][freqs[0]]
adjret = tools.adjoint_model(shot, m0, data, freqs, return_parameters=[
'imaging_condition', 'adjointfield'], dWaveOp=dWaveOp0)
qhat = adjret['adjointfield'][freqs[0]]
adjmodel = adjret['imaging_condition'].data
# adjret2 = tools.adjoint_model(shot, m0, lindata_time, freqs, return_parameters=['imaging_condition', 'adjointfield'], dWaveOp=dWaveOp0)
## qhat = adjret['adjointfield'][freqs[0]]
# adjmodel2 = adjret2['imaging_condition'].view(np.ndarray)
m1 = m1.data
# Data-space inner product <F m1, d>, accumulated over frequencies.
temp_data_prod = 0.0
for nu in freqs:
temp_data_prod += np.dot(lindata[nu].reshape(data[nu].shape).T, np.conj(data[nu]))
# Model-space inner product <m1, F* d> is scaled by the cell volume
# np.prod(m.deltas); the printed difference should be near zero if the
# linearized forward and adjoint operators are consistent.
print(temp_data_prod.squeeze())
print(np.dot(m1.T, np.conj(adjmodel)).squeeze()*np.prod(m.deltas))
print(np.dot(m1.T, np.conj(adjmodel)).squeeze()*np.prod(m.deltas) - temp_data_prod.squeeze())
# temp_data_prod = 0.0
# for nu in freqs:
# temp_data_prod += np.dot(lindata[nu].reshape(dhat[nu].shape), np.conj(lindata[nu].reshape(dhat[nu].shape)))
#
# print temp_data_prod
# print np.dot(m1.T, np.conj(adjmodel2)).squeeze()*np.prod(d.deltas)
# plt.figure()
# plt.subplot(2,3,1)
# display_on_grid(np.real(u0hat), d)
# plt.title(r're(${\hat u_0}$)')
# plt.subplot(2,3,4)
# display_on_grid(np.imag(u0hat), d)
# plt.title(r'im(${\hat u_0}$)')
# plt.subplot(2,3,2)
# display_on_grid(np.real(qhat), d)
# plt.title(r're(${\hat q}$)')
# plt.subplot(2,3,5)
# display_on_grid(np.imag(qhat), d)
# plt.title(r'im(${\hat q}$)')
# plt.subplot(2,3,3)
# display_on_grid(np.real(u1hat), d)
# plt.title(r're(${\hat u_1}$)')
# plt.subplot(2,3,6)
# display_on_grid(np.imag(u1hat), d)
# plt.title(r'im(${\hat u_1}$)')
# plt.show()
# plt.figure()
# plt.subplot(2,3,1)
# display_on_grid(np.real(u0hat), d)
# plt.title(r're(${\hat u_0}$)')
# plt.subplot(2,3,4)
# display_on_grid(np.imag(u0hat), d)
# plt.title(r'im(${\hat u_0}$)')
# plt.subplot(2,3,2)
# display_on_grid(np.real(qhat), d)
# plt.title(r're(${\hat q}$)')
# plt.subplot(2,3,5)
# display_on_grid(np.imag(qhat), d)
# plt.title(r'im(${\hat q}$)')
# plt.subplot(2,3,3)
# display_on_grid(np.real(adjmodel), d)
# plt.title(r're(${m_1}$)')
# plt.subplot(2,3,6)
# display_on_grid(np.imag(adjmodel), d)
# plt.title(r'im(${m_1}$)')
# plt.show()
def extended_modeling_test():
"""Adjoint test for the subsurface-offset EXTENDED modeling operators.

Same dot-product structure as ``adjoint_test`` but for the extended model
(``ExtendedModelingParameter2D``) and multiple shots: compares the
data-space inner product of extended Born data against the model-space
inner product of the extended perturbation with the extended migration
image.

NOTE(review): ``FrequencyModeling`` and ``ExtendedModelingParameter2D``
are used but not imported inside this function -- presumably module-level
imports; verify against the file header (not visible in this chunk).
"""
# if __name__ == '__main__':
# from pysit import *
import numpy as np
import matplotlib as mpl
mpl.use('TkAgg')
import matplotlib.pyplot as plt
from pysit import PML, Dirichlet, RectangularDomain, CartesianMesh, PointSource, ReceiverSet, Shot, ConstantDensityAcousticWave, ConstantDensityHelmholtz, generate_seismic_data, PointReceiver, RickerWavelet
from pysit.gallery import horizontal_reflector
# Define Domain
bc = PML(0.3, 100, ftype='quadratic')
# bc = Dirichlet()
x_config = (0.1, 1.0, bc, bc)
z_config = (0.1, 0.8, bc, bc)
d = RectangularDomain(x_config, z_config)
m = CartesianMesh(d, 91, 71)
# Generate true wave speed
# (M = C^-2 - C0^-2)
C0, C, m, d = horizontal_reflector(m)
# max_sub_offset/h parameterize the subsurface-offset extension grid.
max_sub_offset = 0.1
h = 0.01
m1_extend = ExtendedModelingParameter2D(m, max_sub_offset, h)
# Set up shots
Nshots = 2
shots = []
xmin = d.x.lbound
xmax = d.x.rbound
nx = m.x.n
zmin = d.z.lbound
zmax = d.z.rbound
point_approx = 'delta'
for i in range(Nshots):
# Define source location and type
source = PointSource(m, (.188888, 0.18888), RickerWavelet(10.0), approximation=point_approx)
# Define set of receivers
zpos = zmin + (1./9.)*zmax
xpos = np.linspace(xmin, xmax, nx)
receivers = ReceiverSet(m, [PointReceiver(m, (x, zpos)) for x in xpos])
# Create and store the shot
shot = Shot(source, receivers)
shots.append(shot)
# Define and configure the wave solver
trange = (0., 3.0)
solver = ConstantDensityAcousticWave(m,
formulation='scalar',
model_parameters={'C': C},
spatial_accuracy_order=4,
trange=trange,
time_accuracy_order=6)
# Generate synthetic Seismic data
print('Generating data...')
base_model = solver.ModelParameters(m, {'C': C})
generate_seismic_data(shots, solver, base_model)
solver_frequency = ConstantDensityHelmholtz(m,
model_parameters={'C': C0},
spatial_shifted_differences=True,
spatial_accuracy_order=4)
tools = FrequencyModeling(solver_frequency)
m0 = solver_frequency.ModelParameters(m, {'C': C0})
# m1_extend.setter(np.random.rand(m1_extend.sh_data[0], m1_extend.sh_data[1]))
m1_extend.setter(np.zeros(m1_extend.sh_data))
d_m = solver.ModelParameters(m, {'C': C})
m1 = m0.perturbation()
dmtmp = d_m.data
dmtmp[np.where(dmtmp <= 2)] = 0
sh_true = m1.mesh._shapes[(False, True)]
dmtmp = np.reshape(dmtmp, sh_true)
sh_cut = m1_extend.sh_sub
dmtmp = dmtmp[0:sh_cut[0], :]
# NOTE(review): the next two lines discard the thresholded reflector built
# above and replace it with all-ones (the [:, 40] write is then a no-op);
# looks like leftover debugging -- confirm intent.
dmtmp = np.ones(dmtmp.shape)
dmtmp[:, 40] = 1.0
dmtmp = dmtmp.reshape(-1)
# Place the perturbation at the zero-subsurface-offset column.
m1_extend.data[:, (m1_extend.sh_data[1]-1)//2] = dmtmp
freqs = [10.0]
fwdret = tools.forward_model_list(shots, m0, freqs, ['simdata'])
datas = fwdret['simdata']
# datas=[]
# datas.append(data)
# np.random.seed(0)
#
# m1 = m0.perturbation()
# # m1 += M
# m1 += np.random.rand(*m1.data.shape)
# freqs = np.linspace(3,20,20)
linfwdret = tools.linear_forward_model_extend(shots, m0, m1_extend, freqs, max_sub_offset, h, ['simdata'])
lindatas = linfwdret['simdata']
# Cross-check: per-shot extended Born modeling should match the batched call.
lindatas2 = []
for i in range(len(shots)):
print(i)
shot_i= [shots[i]]
linfwdret = tools.linear_forward_model_extend(shot_i, m0, m1_extend, freqs, max_sub_offset, h, ['simdata'])
lindatas2.append(linfwdret['simdata'][0])
# lindatas = []
# lindatas.append(lindata)
m1 = m0.perturbation()
# m1 += M
# m1 += np.random.rand(*m1.data.shape)
Ic = tools.migrate_shots_extend(shots, m0, datas,
freqs, max_sub_offset, h,
return_parameters=['imaging_condition']
)
# linfwdret2 = tools.linear_forward_model_extend(shots, m0, Ic, freqs, max_sub_offset, h, ['simdata'])
# lindatas2 = linfwdret2['simdata']
# Ic2 = tools.migrate_shots_extend(shots, m0, lindatas2,
# freqs, max_sub_offset, h,
# return_parameters=['imaging_condition']
# )
# Data-space inner product, summed over shots and frequencies.
a = 0.0
for i in range(len(shots)):
for key in lindatas[i]:
a += np.dot(np.conj(lindatas[i][key]).reshape(-1), datas[i][key].reshape(-1))
print(['Data inner product =', a])
# Model-space inner product of the extended perturbation with the
# extended image, scaled by cell volume; should match `a` above.
Ic_data1 = m1_extend.data.reshape(-1)
Ic_data2 = Ic.data.reshape(-1)
b = np.dot(np.conj(Ic_data1), Ic_data2).squeeze()*np.prod(m.deltas)
print(['Model inner produc =', b])
# m1 += np.ones(m1.data.shape)
# linfwdret = tools.linear_forward_model(shot, m0, m1, freqs, ['simdata', 'wavefield1'])
# lindatas_no1 = linfwdret['simdata']
# c = np.dot(np.conj(lindatas_no1[key]).reshape(-1),lindatas_no1[key].reshape(-1))
# adjret = tools.migrate_shot(shots[0], m0, lindatas_no1, freqs)
# linfwdret = tools.linear_forward_model(shot, m0, adjret, freqs, ['simdata', 'wavefield1'])
# lindatas_no2 = linfwdret['simdata']
# adjret2 = tools.migrate_shot(shots[0], m0, lindatas_no2, freqs)
# NOTE(review): leftover debug assignment; `a` is not used after this point.
a = 1
# fwdret = tools.forward_model(shot, m0, freqs, ['wavefield', 'dWaveOp', 'simdata'])
# data = fwdret['simdata']
# dWaveOp0 = fwdret['dWaveOp']
# u0hat = fwdret['wavefield'][freqs[0]]
#
# # data -= shot.receivers.interpolate_data(solver.ts())
# # data *= -1
#
# # for nu in freqs:
# # data[nu] += np.random.rand(*data[nu].shape)
#
# linfwdret = tools.linear_forward_model(shot, m0, m1, freqs, ['simdata', 'wavefield1'])
# lindata = linfwdret['simdata']
# u1hat = linfwdret['wavefield1'][freqs[0]]
#
# adjret = tools.adjoint_model(shot, m0, data, freqs, return_parameters=[
# 'imaging_condition', 'adjointfield'], dWaveOp=dWaveOp0)
# qhat = adjret['adjointfield'][freqs[0]]
# adjmodel = adjret['imaging_condition'].data
#
# # adjret2 = tools.adjoint_model(shot, m0, lindata_time, freqs, return_parameters=['imaging_condition', 'adjointfield'], dWaveOp=dWaveOp0)
# ## qhat = adjret['adjointfield'][freqs[0]]
# # adjmodel2 = adjret2['imaging_condition'].view(np.ndarray)
#
# m1 = m1.data
#
# temp_data_prod = 0.0
# for nu in freqs:
# temp_data_prod += np.dot(lindata[nu].reshape(data[nu].shape).T, np.conj(data[nu]))
#
# print(temp_data_prod.squeeze())
# print(np.dot(m1.T, np.conj(adjmodel)).squeeze()*np.prod(m.deltas))
# print(np.dot(m1.T, np.conj(adjmodel)).squeeze()*np.prod(m.deltas) - temp_data_prod.squeeze())
if __name__ == '__main__':
    # Run each adjoint/extended-modeling smoke test in sequence,
    # announcing each one before it starts.
    for banner, run_test in (
        ("testing extended modeling", extended_modeling_test),
        ("testing constant density", adjoint_test),
        ("testing pertubation of rho:", adjoint_test_rho),
        ("testing pertubation of kappa:", adjoint_test_kappa),
    ):
        print(banner)
        run_test()
| 37.81919
| 210
| 0.595255
| 9,600
| 78,437
| 4.740104
| 0.063333
| 0.049929
| 0.038765
| 0.01635
| 0.903219
| 0.891419
| 0.87753
| 0.853994
| 0.84538
| 0.834568
| 0
| 0.019243
| 0.306348
| 78,437
| 2,073
| 211
| 37.837434
| 0.817122
| 0.320843
| 0
| 0.762048
| 0
| 0.002008
| 0.066614
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019076
| false
| 0
| 0.031125
| 0.002008
| 0.063253
| 0.022088
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6331a05f39c0de1583407780f0d9ac4609563f18
| 11,735
|
py
|
Python
|
wolf/flows/macow.py
|
andrecianflone/wolf
|
826bbedc58d4d29871110349356868066a3108e6
|
[
"Apache-2.0"
] | 75
|
2020-03-31T22:21:04.000Z
|
2022-03-20T10:58:17.000Z
|
wolf/flows/macow.py
|
andrecianflone/wolf
|
826bbedc58d4d29871110349356868066a3108e6
|
[
"Apache-2.0"
] | 3
|
2021-02-03T07:07:14.000Z
|
2022-03-08T20:58:43.000Z
|
wolf/flows/macow.py
|
andrecianflone/wolf
|
826bbedc58d4d29871110349356868066a3108e6
|
[
"Apache-2.0"
] | 10
|
2020-04-27T05:31:44.000Z
|
2021-11-21T14:11:16.000Z
|
__author__ = 'max'
from overrides import overrides
from typing import Dict, Tuple
import torch
import torch.nn as nn
from wolf.flows.flow import Flow
from wolf.flows.normalization import ActNorm2dFlow
from wolf.flows.permutation import Conv1x1Flow
from wolf.flows.couplings import NICE2d, MaskedConvFlow
from wolf.flows.multiscale_architecture import MultiScaleArchitecture
class MaCowUnit(Flow):
    """
    A Unit of Flows with an MCF(A), MCF(B), an Conv1x1, followd by an ActNorm and an activation.
    """

    def __init__(self, in_channels, kernel_size, h_channels=0, inverse=False,
                 transform='affine', alpha=1.0, h_type=None, activation='relu'):
        super(MaCowUnit, self).__init__(inverse)
        # Shared keyword arguments for all four masked convolution flows.
        mcf_kwargs = dict(h_channels=h_channels, transform=transform, alpha=alpha,
                          h_type=h_type, activation=activation)
        kh, kw = kernel_size[0], kernel_size[1]
        # Orders A/B use the (kh, kw) kernel; C/D use the transposed (kw, kh) kernel.
        self.conv1 = MaskedConvFlow(in_channels, (kh, kw), order='A', **mcf_kwargs)
        self.conv2 = MaskedConvFlow(in_channels, (kh, kw), order='B', **mcf_kwargs)
        self.actnorm1 = ActNorm2dFlow(in_channels, inverse=inverse)
        self.conv3 = MaskedConvFlow(in_channels, (kw, kh), order='C', **mcf_kwargs)
        self.conv4 = MaskedConvFlow(in_channels, (kw, kh), order='D', **mcf_kwargs)
        self.actnorm2 = ActNorm2dFlow(in_channels, inverse=inverse)

    @overrides
    def forward(self, input: torch.Tensor, h=None) -> Tuple[torch.Tensor, torch.Tensor]:
        """Apply MCF1 -> MCF2 -> ActNorm1 -> MCF3 -> MCF4 -> ActNorm2, accumulating logdets."""
        out, total = self.conv1.forward(input, h=h)
        # ActNorm flows do not take the conditioning input h.
        for flow, takes_h in ((self.conv2, True), (self.actnorm1, False),
                              (self.conv3, True), (self.conv4, True),
                              (self.actnorm2, False)):
            out, logdet = flow.forward(out, h=h) if takes_h else flow.forward(out)
            total = total + logdet
        return out, total

    def backward(self, input: torch.Tensor, h=None) -> Tuple[torch.Tensor, torch.Tensor]:
        """Invert the unit: exact reverse order of forward, accumulating logdets."""
        out, total = self.actnorm2.backward(input)
        for flow, takes_h in ((self.conv4, True), (self.conv3, True),
                              (self.actnorm1, False), (self.conv2, True),
                              (self.conv1, True)):
            out, logdet = flow.backward(out, h=h) if takes_h else flow.backward(out)
            total = total + logdet
        return out, total

    @overrides
    def init(self, data, h=None, init_scale=1.0) -> Tuple[torch.Tensor, torch.Tensor]:
        """Data-dependent initialization pass in the same order as forward."""
        out, total = self.conv1.init(data, h=h, init_scale=init_scale)
        for flow, takes_h in ((self.conv2, True), (self.actnorm1, False),
                              (self.conv3, True), (self.conv4, True),
                              (self.actnorm2, False)):
            if takes_h:
                out, logdet = flow.init(out, h=h, init_scale=init_scale)
            else:
                out, logdet = flow.init(out, init_scale=init_scale)
            total = total + logdet
        return out, total
class MaCowStep(Flow):
    """
    A step of Macow Flows
    """

    def __init__(self, in_channels, kernel_size, hidden_channels, h_channels, inverse=False,
                 transform='affine', alpha=1.0, coupling_type='conv', h_type=None,
                 activation='relu', normalize=None, num_groups=None, **kwargs):
        super(MaCowStep, self).__init__(inverse)
        num_units = 2

        def make_units():
            # A ModuleList of `num_units` MaCowUnits sharing this step's config.
            return nn.ModuleList(
                [MaCowUnit(in_channels, kernel_size, h_channels=h_channels, transform=transform,
                           alpha=alpha, inverse=inverse, h_type=h_type, activation=activation)
                 for _ in range(num_units)])

        def make_coupling(split_type, order):
            # A NICE2d coupling; only split_type/order vary between the four couplings.
            return NICE2d(in_channels, hidden_channels=hidden_channels,
                          h_channels=h_channels, transform=transform, alpha=alpha, inverse=inverse,
                          type=coupling_type, h_type=h_type, split_type=split_type, order=order,
                          activation=activation, normalize=normalize, num_groups=num_groups)

        # Registration order matches the forward order (keeps state_dict layout stable).
        self.actnorm1 = ActNorm2dFlow(in_channels, inverse=inverse)
        self.conv1x1 = Conv1x1Flow(in_channels, inverse=inverse)
        self.units1 = make_units()
        self.coupling1_up = make_coupling('continuous', 'up')
        self.coupling1_dn = make_coupling('continuous', 'down')
        self.actnorm2 = ActNorm2dFlow(in_channels, inverse=inverse)
        self.units2 = make_units()
        self.coupling2_up = make_coupling('skip', 'up')
        self.coupling2_dn = make_coupling('skip', 'down')

    def sync(self):
        """Synchronize the 1x1 convolution's cached inverse weights."""
        self.conv1x1.sync()

    @overrides
    def forward(self, input: torch.Tensor, h=None) -> Tuple[torch.Tensor, torch.Tensor]:
        """ActNorm1 -> Conv1x1 -> units1 -> couplings1 -> ActNorm2 -> units2 -> couplings2."""
        out, total = self.actnorm1.forward(input)
        out, logdet = self.conv1x1.forward(out)
        total = total + logdet
        for unit in self.units1:
            out, logdet = unit.forward(out, h=h)
            total = total + logdet
        for coupling in (self.coupling1_up, self.coupling1_dn):
            out, logdet = coupling.forward(out, h=h)
            total = total + logdet
        out, logdet = self.actnorm2.forward(out)
        total = total + logdet
        for unit in self.units2:
            out, logdet = unit.forward(out, h=h)
            total = total + logdet
        for coupling in (self.coupling2_up, self.coupling2_dn):
            out, logdet = coupling.forward(out, h=h)
            total = total + logdet
        return out, total

    @overrides
    def backward(self, input: torch.Tensor, h=None) -> Tuple[torch.Tensor, torch.Tensor]:
        """Invert the step: exact reverse order of forward, accumulating logdets."""
        out, total = self.coupling2_dn.backward(input, h=h)
        out, logdet = self.coupling2_up.backward(out, h=h)
        total = total + logdet
        for unit in reversed(self.units2):
            out, logdet = unit.backward(out, h=h)
            total = total + logdet
        out, logdet = self.actnorm2.backward(out)
        total = total + logdet
        for coupling in (self.coupling1_dn, self.coupling1_up):
            out, logdet = coupling.backward(out, h=h)
            total = total + logdet
        for unit in reversed(self.units1):
            out, logdet = unit.backward(out, h=h)
            total = total + logdet
        out, logdet = self.conv1x1.backward(out)
        total = total + logdet
        out, logdet = self.actnorm1.backward(out)
        total = total + logdet
        return out, total

    @overrides
    def init(self, data, h=None, init_scale=1.0) -> Tuple[torch.Tensor, torch.Tensor]:
        """Data-dependent initialization pass in the same order as forward."""
        out, total = self.actnorm1.init(data, init_scale=init_scale)
        out, logdet = self.conv1x1.init(out, init_scale=init_scale)
        total = total + logdet
        for unit in self.units1:
            out, logdet = unit.init(out, h=h, init_scale=init_scale)
            total = total + logdet
        for coupling in (self.coupling1_up, self.coupling1_dn):
            out, logdet = coupling.init(out, h=h, init_scale=init_scale)
            total = total + logdet
        out, logdet = self.actnorm2.init(out, init_scale=init_scale)
        total = total + logdet
        for unit in self.units2:
            out, logdet = unit.init(out, h=h, init_scale=init_scale)
            total = total + logdet
        for coupling in (self.coupling2_up, self.coupling2_dn):
            out, logdet = coupling.init(out, h=h, init_scale=init_scale)
            total = total + logdet
        return out, total
class MaCow(MultiScaleArchitecture):
    """
    MaCow model in paper https://arxiv.org/pdf/1902.04208.pdf
    """

    def __init__(self, levels, num_steps, in_channels, factors, hidden_channels,
                 h_channels=0, inverse=False, transform='affine', prior_transform='affine',
                 alpha=1.0, kernel_size=(2, 3), coupling_type='conv', h_type=None,
                 activation='relu', normalize=None, num_groups=None):
        # The masked convolutions need both a kernel height and a kernel width.
        assert len(kernel_size) == 2, 'kernel size should contain two numbers'
        super(MaCow, self).__init__(
            MaCowStep, levels, num_steps, in_channels, factors, hidden_channels,
            h_channels=h_channels, inverse=inverse, transform=transform,
            prior_transform=prior_transform, alpha=alpha, kernel_size=kernel_size,
            coupling_type=coupling_type, h_type=h_type, activation=activation,
            normalize=normalize, num_groups=num_groups)

    @classmethod
    def from_params(cls, params: Dict) -> "MaCow":
        """Alternate constructor: build a MaCow from a flat parameter dict."""
        return MaCow(**params)


MaCow.register('macow')
| 46.752988
| 108
| 0.625735
| 1,413
| 11,735
| 4.985846
| 0.094126
| 0.140525
| 0.188219
| 0.121789
| 0.817459
| 0.782967
| 0.77885
| 0.733428
| 0.707026
| 0.692122
| 0
| 0.016304
| 0.278739
| 11,735
| 250
| 109
| 46.94
| 0.816044
| 0.027269
| 0
| 0.535135
| 0
| 0
| 0.012259
| 0
| 0
| 0
| 0
| 0
| 0.005405
| 1
| 0.059459
| false
| 0
| 0.048649
| 0.005405
| 0.162162
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2d6b232504f384dc58e0fde159c4c49dd8bd23d1
| 30,667
|
py
|
Python
|
unopartylib/test/utxolocks_test.py
|
terhnt/unoparty-lib
|
f23bcfc91ac109dc8c248210229c253be7d405ed
|
[
"MIT"
] | null | null | null |
unopartylib/test/utxolocks_test.py
|
terhnt/unoparty-lib
|
f23bcfc91ac109dc8c248210229c253be7d405ed
|
[
"MIT"
] | null | null | null |
unopartylib/test/utxolocks_test.py
|
terhnt/unoparty-lib
|
f23bcfc91ac109dc8c248210229c253be7d405ed
|
[
"MIT"
] | null | null | null |
#! /usr/bin/python3
import pytest
import binascii
from io import BytesIO
import bitcoin
import tempfile
from unopartylib.test import conftest # this is require near the top to do setup of the test suite
from unopartylib.test.util_test import CURR_DIR
from unopartylib.lib import (transaction)
from unopartylib.lib.messages import send
# SQL scenario fixture used to seed the test database.
FIXTURE_SQL_FILE = CURR_DIR + '/fixtures/scenarios/parseblock_unittest_fixture.sql'
# Per-run sqlite copy of the fixture, placed in the system temp directory.
FIXTURE_DB = tempfile.gettempdir() + '/fixtures.parseblock_unittest_fixture.db'
# Options for the test server; bounds the UTXO-lock cache size
# (presumably sized so locks are not evicted during these tests -- verify).
FIXTURE_OPTIONS = {
'utxo_locks_max_addresses': 2000
}
def construct_tx(db, source, destination, disable_utxo_locks=False, custom_inputs=None):
    """Compose a 1-unit 'XUP' send from source to destination and construct the raw tx."""
    composed = send.compose(db, source, destination, 'XUP', 1)
    return transaction.construct(db, composed,
                                 disable_utxo_locks=disable_utxo_locks,
                                 custom_inputs=custom_inputs)
def test_utxolocks(server_db):
    """It shouldn't use the same UTXO for two successively constructed transactions.

    Fix: the original placed this string AFTER the first statement, where it is a
    no-op expression rather than a docstring; it is now a proper docstring.
    """
    transaction.initialise()  # reset UTXO_LOCKS

    # Compose two identical sends back to back; with UTXO locking active the
    # second must pick a different input than the first.
    source = "Ukn3L4dgG13R3dSdxLvAAJizeiaW7cyUFz"
    tx1hex = construct_tx(server_db, source, source)
    tx2hex = construct_tx(server_db, source, source)

    tx1 = bitcoin.core.CTransaction.stream_deserialize(BytesIO(binascii.unhexlify(tx1hex)))
    tx2 = bitcoin.core.CTransaction.stream_deserialize(BytesIO(binascii.unhexlify(tx2hex)))

    # The first input of each transaction must reference a different outpoint.
    assert (tx1.vin[0].prevout.hash, tx1.vin[0].prevout.n) != (tx2.vin[0].prevout.hash, tx2.vin[0].prevout.n)
def test_utxolocks_custom_input(server_db):
    """Passing the same `custom_inputs` to both constructions should make
    them spend the identical UTXO (explicit inputs bypass the UTXO locks).
    """
    transaction.initialise() # reset UTXO_LOCKS
    # NOTE(review): the bare string below was presumably intended as this
    # function's docstring, but it sits after the first statement and is
    # therefore a no-op expression — confirm and relocate if desired.
    """it should use the same UTXO"""
    # A single pre-selected funding input. `txhex` is the full raw serialized
    # funding transaction; `vout` 0 of `txid` is the outpoint being spent,
    # paying to `address` via the given scriptPubKey.
    custom_inputs = [{
        'txid': '2c3416c8742fa71caea929e6cdf10e02fc10dd39d8b5bd36a71498f3173ed0bd',
        'txhex': '010000007798e9ab993fc529ef037cf3728ba51dac5ac196fe4bf6194cd319fe5b92a519dd00000000484730440220798071268b37ba27100d528bf67aefaa93fe6be04840143151d28d1522e4415f022010c92ec7612e44dbea898353468868e5a1a2d7b60b5aa18263ce37cbb138712e01feffffff806ac41eae1491fa86a556a368b721e2e0ad16402e226c7ba61eb33122110c76000000004847304402203cfbbb4a34244bcd0d08ae2cfd804dafbcde767e2c36cb9f6a5865e1a3ce5a6502201aa937ce778c53a03a56f9b68002e6ff9e8f9e0ab9c8c34643f56b40df6333d601feffffff3910a19e120805a9831bb2b72036f922e04ba751f15b694dd49308e18aafabd30000000049483045022100c3ee73a84ddde610293d7df6382836517ea20546b2ca5e92c6df6ab3e95ae5fb022068fa398bd549108bbe931f55af3b12b661d583215bc3ebd7ad05c80654383e1901feffffffe7a8010f588721acd3e065c0278192d272cdc6ff45f1d78c456ee021ef77680e0000000049483045022100e7668e5b2fe2b3510bb2029229dd06eb01fa6d7ecb06b4402350e8767787900a02207b5170a76033a1fafa483fb1e30f320238f502bbf566fdbfeaa8b12977e34e1d01feffffff3920853879c39d753cad77f147b4a528442b751a671208d5bda737876b018b3b0000000049483045022100fa959bf2a1ff7a7bc60faa8b262215f7ec947cf9780d068b2901d52942fbfa94022071e593aa6d47c3c7262cda498b4c2ee1a4f8c29b7d43fa00fb667c206a5642db01feffffffcf011a5a8e5c59ced7205eb5d3b8225b82a708e1057c4420c2040e212820ba850000000049483045022100d654640e844c642e19952610a2ca9423958e1bfd583d17f2988e7e697dbcb8f602203e81d746b14bd4206a60d5447a40e2329bf8a737012a5a015bf9f3c8161ba31b01feffffff97db63374d570a6933a2e7a8ea2825271c55ae4af79850bc27249c4e691bebe9000000004847304402201100af776e2878877b012e12b468a39728b1344161e83dde080e22030906ed130220168f6c5f5b41f335233a5976370082ee16052e2fe68a3c104ce28347c6c151d101feffffff30798c1411d15594266a363cd361bbffe881fd6b40506dfc48e203cdce3452ef0000000049483045022100a946852106c7c4ec6e9db04d10a81e3d0425b1ce2bb165e5253e69bf052019f7022060b05498bdf5a8930b1a5ec9a612ee54f03c9edc769eeb8e2f7dafe55cf3178101feffffff9d7bc10c83b2bca0202ae8a1d7aba2d3cbbfe4514c3a02885c61fb147464b4da000000006b483045022100c7ad0d9e4863a2682647500b12dad7abd8f7f7f2e65fd67b76084633ec8bd25a0220166f0809b0f3a038cd1c9c128e1295dd9a2975f63ceb332b9751044f0cfc3f8e012102a5cc8619394ebe13715b352021c84d2d00f847b6be80d016768c0de4b1dbe677feffffff9d7bc10c83b2bca0202ae8a1d7aba2d3cbbfe4514c3a02885c61fb147464b4da010000006b483045022100e1917231ab9a4e4e046d83197370d0c8eeb5f2303f7847af0cb6b850be33e46402205e295c15d977405b5e40e866183cdd164e5957aa8142d0c7ce32a6f89fdd8c6e012102ce0b4717107f6ef463a47cefb33167a3105e1d47e9dc6c3abfe9c2ebe84a21c1feffffff87d3538b5cc94b4d8e6c9440367b019416f8c8c2d23316a46805fdb78dde2e05000000004847304402202255a5d1c9d583f27096e30254e781d6a41fa417439dfc17569f30065a17047002205a7a240f0d275b819c9eb69dbe8e9256a2f5d1c7abc0cfe3fc197c7abc78655301feffffffc08954ea987c934776b5d167ce979e778e4fbbef0507217abb5ad18e4018bb8f0000000048473044022014730fc66f961b4cefb4a0a77d6d9e5188dc0c3154c107fe494504b5ead7168f02200b93dfca4f5a610f0f8ac5cf4802ed4e3b6f5522ca3078a6fc5ed020ba1f0ba801feffffff12764179950c9f8465627e40f3f0eb038977d76bbc42224f1dfafcedd6b62713000000004948304502210096825929057b25e0bbe25bf3c31595101cc9e326e3482e2d4164d3c5141bdded02204885bc72aec873df594d46c363b596c3c93efc13f0b8f0ca917ccea6975bc82b01feffffffb84b4beb29944df2fb660a474d81829bc8c6ebb862522a8ab1fa5dc2f06dae73000000004847304402200d9115764fe1c93d71d62117ba6904f5c50f965d3174f0b7d30d0b5ca9121442022018bbb8c764b45a3c9494713f67127a1c05ad3ee42978c06c11ad36cd8e97da1f01feffffffcf92e808b9b0cc5307a4f31e87714a4255a8535b4e8ae7e9908cc9a20eb127020000000049483045022100d74cd84b7a6eaf0642f6077a2e2b479a703a1376c834ebf3fae4296f1f6cd49702204cd7b5eacd12f7bfe80d9d231bec61d936c8b75c17decb27646d03f36eb5eafd01feffffff841df797648e2a1302048cc9c439c50d8f4745376280981501a76c773e380ab50000000049483045022100961c352929cec443fd1242d820a795d8ea96fe610bdf6ba74afdaa62cf1d09b602203fbaffb90f03d02004173752c52534737ff9798301ab24240be4f30df6b583bd01feffffffb5b2645f9f8f807f68f1b8f947740c6f6767ae3e398d602415e2e69db9446b34000000004847304402203b42dfc39d8649eebf69bc86ff70c57b67ffc44c74e3be373af359aee859a069022014006d2d2f38caec9989d65127eb6293695f705b402000843670​3b0865a4545801feffffff69fec4bfc9660ecce6dc0b54e6189064bd0e51b67bc64fe6953d9ac0d150355f0000000048473044022013952a5b36c985296911fc8d7ffd179b157c36d031acd402b73b1ac98175721a022044325591dae378a214d599846e6dbb68a435ddb2c41be04b4944a845ef03667401feffffffa71ba7c5f50b9fec8bddcf417d067b157c7b157871f4aa2b42e0504ac0bc75530000000048473044022071988222b55b0a2a0b917a88dd2d237f09d97ff1792197c359d1c6850d53d6070220236613f8b43183c3996530ae1b5a857cc5721dff1d8c0b7be8e52c3b9d6c1d8e01feffffff95d64bebce9ad5163aec589c069a9d6cfcab407314cb4cae22c5d4a25bc296fc000000004847304402203a1bac7fbcd3f6441055e2845374689bb6a53e4de63fe52879dcd0a589de53a60220752626f08030d0719f4a67388c30db8311e7c1020f3520d8351b3ae7c3d2ee2201feffffff0c67d6143e63eeb739fbde1cffd3ba404996870f1c66e2b470a0ce929c53343900000000484730440220161e92aafaeb67633ad503334190402ec6bcc01484ff491ce4804a97d2544a680220703cfa22b095cf271c700d6585a544abb56155f2f17bd39e9710516a5468146201feffffffe021350751f658b8907a939a2fd4c03ee1069266ea0eb39c2a1496750d7889b2000000004948304502210093443cd7eae1131affe8f37d2ed52e130de0ecd972536ca6611aa429b96ab2ec022057c7f5d61622c238ce53fee374adaa7159c58a5f3b1b85861e8d2c0fd9cec93e01feffffffed8e43b66feb1203fed6c4560b1cbb11f9c7050474e2ae0d711301a88cd0f4b00000000048473044022002c057d93d8d12dff56e3e63f31cc011ebd4bca8d627705c341b83e747671664022063b908ae5fa291ded58b170b679400ffc5b9f5360a979e335a94af8506fb1ad701feffffffb2ec67080ab4cc0b6fc5045982dbed029c715de31e7388025b04f8c4f0b051fc000000004847304402201cf6cafdc1877e28f593fe6cda221ab1ddfa249c87dc6f19f8001c7920fb321f0220250fc4517d8b6c5066b53cf38479de2cc1f7a65043ed118d76431f301140461301feffffffbed52eed16d1b07d549226a01ddc64450590b63ae93dcdc2e686177506a7d9e20000000048473044022073f36b22453d34c91425c791221add19e6a047b9536369fd377a7d4be6fdf43402206b801fa4534055c948d39b81c32a7e55a16d280535c46c7c6ae51a85ff5dbda801feffffff3177ec65aa2815266c47f5d82cf561055d65d79089049670979eb45d6f01a61d0000000049483045022100c1b5c65b2f16365412a284399db1fe98b7e6161eaeaace860b03b0ccc02c6aae022076525bbaf51551ff1fdce46bfb272d5884258891f96b557257b200d068602e4a01feffffffe33619de67078aa6d7a19150188977ca2186e753b74ac91c254159716b699f100000000049483045022100f9dacda073631000b68a51c3b1d40fc23d55e955269b0dc83cb32afb803d32a502204c31ee06323e9404311b0833ff44d57061b5797be549a438ccaf0c817a01edfc01feffffff92879373f4e8abfc17f51451518bf5eaabd0945bab353d821e219cd8625398920000000049483045022100aa809c42feee203f561dc4702b6702c35fcf18f58510413d122cdace6bf9afe702202f718b4b11de2c7c5870750e0236ce887a85186f241aaeed25303222034ecead01feffffff55b98a91905ee6951047f84a47febe38a4f3c75ccc90d933e05e08714630ceda0000000048473044022064f14fa6c06e1fc72093afcc6f0153e455590b24adace75a649e29e68a42395802200556ba1ece78dc526236900eca6290a332b2c7ab419c113023f1f571316a75a501feffffff9cabfd7e2f71ed2074a1f6654838c008686fa0bf02ac27f0c9e49d27d4a9a1780000000049483045022100922fade6ffd4cc8def98c7be57fb64736751441b329ca382ad71a5d3e7beadae02207a1dbd732a17d8b592e0d6716317bd01637879d1e0aa004a42544abc7a0a48d901feffffffe558bf9ba9c36101d48b71f430fa96b927b74345b91fb78b3d21b3ba1309cb5a00000000494830450221008487fd70be40dc9878968172adce8567e4e4938fe458a523d9a4d55ae572bc06022035049e38e2db28bfb567752fe1dc65c8eef9c20e4c62787b7d79a34da93e2e7601feffffffa8ae44d4da04a37e8de948b83e8ea8eb326957f0153ff740940db5fdfb21741700000000484730440220641a1ca750f8f155b7043bb16451aefc19f3e2e968f5eaf71f0880c29d4bc7ef02204a86fc1e42984ca160e35e8ef335a15b9df6c4bb1721641c3c0bcb2aaf8b88ba01feffffffaee6904568f3b6ff8ec0629819951f9d90aebfe11c006ee6142af5a5dd7ca3f70000000048473044022014af97edde5dfac866446f74797dc5b45b678a47f4cda7b54e33167c676ffd390220100ef92cd07ce5369971c2022ab77d42cb50439fa15171d69b04caa594c8c94501feffffff5e75890530f0500f3eaa2d4c3dc9ddadad86767a69d07cadb491005ecfe8ea5e0000000049483045022100e796f4fa793e2ca75694e34a52593a7dc1ddc53d2c64757e21ab4545c2b65c3c022055f84375dcdb8d5449177bc0d8132d95a0ed55bc2c797a4f9f74d319b1444c2301feffffffca0d905a48095557e478228700f5146db1a4651d981db2b65ac4f6fabfe2d92f0000000048473044022041d7dcd72b6d3996731a9baae00f18111a71efcbca4dd670dbc18cf3966cfc47022020659ba608fa7416b3d450b027d876bb5f97fc66996b9d1d9a6466afe19adb1a01feffffff4e46599d5e53c1a78cb8e99c1c65257754700ca5e3ef9078f14fd4b463d745e800000000494830450221008f441e26b3c78ea7a8e3fcdcee4a92876e711c2df091dca61b8cdecff202d5f60220647b31de5d6f00b98813c43e699296f70d25eab0ab6df61045bec87a7b7e84e801feffffff0fc38b1dfec744e27e104ff75dc13f34f6ce614b8c49e26c9210f910aca76afc0000000048473044022031b7a462e90202f068d9f8a1afbd471eb6c38a62b25c3a5052b778cd6b9797290220242d79aa68a3934ddd5833cf41b01e75f32bcc64a47fa3cea4502f406181c4f401feffffff66e1736b65bcbd9eff9469343970f339e9557e1056aa7232d027c8da7f0609720000000049483045022100b868cd060b99059c065d7ff6048f5b3fcd4eae7571a0aaefd4293ebc1eea8451022032c7e0d67ff2027026bf658574bc0db5c1cc832637dbc9b84af4a05cddaeb79201feffffffdfcfe6cca96d1f7537de60155e9e3402800db953e2d3412db837d44399e7c6470000000049483045022100e58e4eb8ef81b30d08f8d4231494c958732f1b0a8a3f56a66e3f6f34fd37cf7f02204b84145df04e6c904483e85548c16c0105f566457a58ff41c3593a724101299c01feffffffdd7033820319f13c1a914bf792a56a880d98e7422955f6d0646fad462d67f61a00000000484730440220207f8641e523d313257d9a4934d3ffd00e196125ba73a51e7aad8c2fd24428b7022015dbd729e0758ae7e51e34fd8474e62bcc2e0c55a656ab5d0c9ba586f9c2d9db01feffffff1a15744ff2131f3bfba3128dfeca241ebf5f082fcabe9a8b9d7dcc6177be0ef3000000004847304402205b2ab54ad67a8712212e62a57b15037832cdf182cbab016dcc602e4a39e2682e02204d75649dcbb440143b6234455fcc8d25e8185b856a8d2e39826fb0c9428271ce01feffffff71cda893759174e4aa695d3c19d129303b6ca63f7f6ef89019a616edf459ca63000000004847304402207c07c57d91784ed219835ec652267224b2c35dcebd3cd5307fbcd6bd7e975d5a02205ad1ff19ad888d6a774f9feb61674066a1c3f1f0efb43e60e803c711f631adf601feffffff0aaff7b56842b8fa68832ba83919449e52e6911d709e1731ecf8a5d79cf05dba000000004847304402204fe54062e6731e9ac3363f5c3121c9002718f146951dff8ba3fd6e285e339fcb02200b0ff117a007b94c7b1ae77b225bca3b08a5068729dacfa64ba471420260f2e801feffffffaa4f18e008150f71661d00c6a8a23bb40a166beba9df5c04e688257550947b0c000000004847304402205fc9d64a174c229f9fb5a4ea6d408c79cc32981f42e8dffd100c2a6897f4838b02203acfd2e0a1e63da4a6ee7a7d6d6d895637fec126ec80332c8524c96f01a3ade301feffffff6eeba9c3f2d0a3b649d182cd9af8d9441075964acee3e4e678e7d35a44343e57000000004847304402205c5721820fd2987edc7e62b15c7440f7e47c1d6052792e87050785249af8a96702206aa25d46cba24997bc42e5602efe479f51079bfb26e7562890af94a10282801801feffffff385919aa7afc6a03bfdc656e28f15109a3c828e2bff2707d70b86a2ad91f0f3200000000494830450221009e77573747901c72c81aa97270e6fc7451191a207db6428555a0183295730184022045346d698786380ebb86c052b8cba80ac4c37a7342b8fd460e34f6b4d6eef96301feffffff42f96817b627b828dd204e968e89bad4cb012a79c669600a6151d8107b033c450000000049483045022100d73652af75f7ea0ddf339b83365e5021bee3baa41e515a240275b92b1fe5108e022076b1ac8d4168169895f82ffccda248029d340e411ff4e6644737c13b8351061f01feffffff62dbbd9fea1f1cc48a5dbea250550ac2380a2d6dd7d764c842a22d5e441a06410000000049483045022100a9222cff45488618c916c235ee9081e1074fd71b4ade51a2901411de278e0e9902204a1e3db06ee617fb388fd326160c631c87324f59aa98522fc233e7f769332cec01feffffff258077586c8c7ae49a95a286f276ab55048e7690bafed91df33119697ac6cf340000000049483045022100f246b38b65e38b728334c5baeac2ae0a54e5ae4767434dae6b2a139fb6f26fe70220160a822ac27711418cb21e08d6f81d46c00fe78752297f60081d64d839e27a1801feffffff64d372aeb07cd03a436cd1942e6cc9091b646b967a1817fe48c6961f4b98b7560000000049483045022100e3fa94a84e9b972a8f7ab27cb4ff90ca69fe2f4c331cf912b34d34ba8591c1a202204b8d835ac2134d7f89c93220a92fa5b2b8cd776a87589ea81ba3f27ab35a53ea01feffffffaee14a01e71c04f21c543d60ed2611105786a9eb99b18e2a5938ff21a730af21000000004847304402205f39d6deca8f0b04d5c9dbc8999cc09e8eb484689221f657bcdb11d81f35cd5902205a681c2cd6de92586a9bd93118152f48fcc46ecd7f8c44e1a8967fdad01770e101feffffffa71594e3a8f96fe01b5e1d86dfc1d988f1bf8dadd52fa661a7ef19901d8c29470000000048473044022016e959292a1a4c7d764b03b0c90a3e9939195db1ba4a82c92175b55c1e6e55df022034e99f5dbd0a7184f80adda447ef2a5494e4576fcafce673a1fbdbb5d93c7fac01feffffff4c6822a28a72b562c485bb6a42666ea44cacb45b5976ab61b4397e60cabe7b2000000000484730440220754bf38188a0432065d22419c703db00bef1c5fc26535b2c32059d3812ec8169022043629a03fbbb77a24713c5d8342179f1e1593974f552c327b0fa84d92579c36d01feffffffbffc29d4d59e83c4bf9c9e157ebbab7ed63ea244ad7a769626954892e18676b000000000484730440220756504d644d6c2045dcaa507055052af5daf65777aa6d39b91c17b240f8a54ed022059615b14f34572dba50f003b6ddd9b00ac39e84a8dcb9c60159c776356485b5d01fefffffff9cd1e3126d5399713f51223aef06fbbeb8c874bcab2095fbc7c3aee5702d6af000000004847304402201d8fa02a325abf845787836407648633620f1e41e9ad298abf981708f692a8a0022066a099a6200176aa4d880aec997bdefd4fd23333cd94e3a11556f8405f77f06501feffffffb6620c3895ac8fd56d39982f47b85fa42af77871404170f81c303d00c2f7040e0000000049483045022100fa4dc54aaff17f66b4c5f4937c902cfd4f4277f2d16c36513264c4483c8d1713022022dc749e834115f056f40bc069d8f36c34439b4176aedc20320ef2e0f00493e201feffffffcf34b8145724564282480c7e56f203ee4d6bdd88f705c4e0199656702669237c00000000484730440220241eaa45b46ad4c486285ddc6ea744692304a1d3a26e8a5c86adf70c42bf771802202b055c940bfd1c391c7591968ff9661cd2aa02761fced43ef078d4963820c4fc01feffffff405aecf028994be5bafc56e8786e36de46e08a17d9c9bd4a26f84b5b1e8c0a370000000049483045022100e4ebed89d23c54c56201439b77f16871043f91a736f81509fecd79a5d733d3ee022047312505f4e3fbfbf339950ba03a600ab602f0345d8d55aec4a169c26340011801feffffffb06d28767e8f8927107f5e8e66d8a0789038fb1ded799f9656675e174793a2120000000049483045022100df9450cd7d51cbb585da7c4b64fe2facb6f5797ead26204cad66a52c6452570b022022391e5d3e0b0cd37e54b38c5dd91b71bc21adbd5bead62090c101edfc4b3cbf01feffffff84a8fcde86fb5ce055a5f2317ec0e9b6b67b4583d5fcdffc423f67ef99b7f3b7000000004847304402201d8ab9307d01a88c0cc95335d5ea5d4328a9ed786cf86dcc267402714f6399fe02202eb4fe58792c4a8aacf7cdb5694a74e160d4f0ca198f1f8137fec9d415d3762901feffffffee7de77ee6e36b0d00b8777af9a572b15013ef56f40e25b70568b58e3581c4a70000000049483045022100c90695a40f15bf66ccb18040876cc5ae1b2a5f97a7184ffbc64bb66ac5eb1ded02205385cc3bb2851d4ff4b6fcc9862d457e41a97afbf4dd2e1d0992cb2d9e4a019901feffffff42e91151786c3c2538d91694fe77cfe51cba7b6543f5c5e518b1f1bfdab2f1be0000000049483045022100838871894281b2db56f62c430e6bbf67ac33d9ba0a98b9109404cc51b5d0860b02201d95670e0e4b6a1d328ef40733e2e87fff4f1c399d12020989f3d1541df314bd01feffffff9132eadf02e6f2e7dff4903276f6019abaeb1fffd5b726e8337c1a7698f578b800000000484730440220112a91d936557bb521c356ddd52a816c0d8b83d7021d5b253ed92f5d268c1be002205f464dbb7f20837685a7792da10ccfa0a4ec9eeee8126386fda2ca5d3923f8c201feffffff80b82e7c49e104d33935a908d8fcabe51f41e593c335e4581918ddd7da02bfdf0000000048473044022041f48c5cf78b059f06703ce427cc44152f0d4a0c8a850c9f5827459489fb4b8402200f9b72db9e19726eb872da8af1823c5c5341fec4e1fd81fce8294830ae8da7ee01fefffffff299dedeb3dfa3ae8a09d84f466fbe78cd0d4dea53691cf970604721f6f464bc0000000049483045022100f5741ecae731ad8e5041cba78f3481bedabea261bbdb3b484b41ee3a52ea149d022036a93df9cf62fdaef4de58cb720a3e83ec398c55d300362d5e2f929b096a87bd01feffffff14175c68c5f87f2ba73c995f5242b69403ebeed0188a4855acd9d147e01070650000000049483045022100c90d502ec9e4a2169fbf4da6df911d2c484a4b4bff1d750e6096754fdfefacd402202a0fec1f0342a492a98036a09e317fd56387b9547754382aa4d7185c0757961301feffffff27a712d00e11c5e2c2091640b1801f0e3146eef1286820c1dd0ac4141faa12ab000000004847304402205fb1a0e145595ee5a6ff89b9ef206d6526e87707037e41d92c240cf3d52742f3022006b9a37654c0ebfdde48b3386cc8b0e7af9cc2c7daf46030646d812bd49ee6dc01feffffff8ae82e4c3b4d5fa9ea37a63269604061d42adf3a140034fad6437759e36fc10d0000000049483045022100e89c2b2104cb18a545f707b1bb546c35f12e61817715132f5e20293a59ebbf2b02204c43b3a770640e4252b7df209e7b19bef51d0ed3219732d401bf06e1ab78e4d601feffffff7a81785abd7ff816c3f7dc30f8bbec357bc0a04635154146526f2f3f75a22ba100000000494830450221008ea06faf7aea18021af559c1a4a74a1b2c380fdef0e2995b55b5751c180b45d4022078905b9d23f354c1b47b02f7a4cb2a39dbc02133c5c9aa132d9b4826ee65777701feffffff449f202ea9f4a831a80fe70dad6d4d400daa2073e212a66f8b8e6aa56e7f4e04000000004847304402201e7045b70885102940427b6a449168cc97477b1f7feced7164c1821685ee13ad02200713ae3c34d060901e1f65c879dd731d700be75d06da068da9a09c7ec12312d201feffffffb575d5e50a311825a07bac5ea297377f202d071ab38b6ea84e42b13b77b6fbef0000000049483045022100d468f798ab2a5df4ee39e36f1352c5785eef6a6a93c744d5a763efcf6021dbe502201bab83f5631d9846647f85fc4c9bf32d14f11c6810daf9d3a9cb5c8357b982c401feffffff7b334718777dbb87762bacc5246b79ac38a29070354775593e6f485bccbe13dd000000004847304402202f30fadb93e68f8a8c67016419c1d2c7efb477c0151b4a37305fa4b13d71fdba022061f457b918ef1d6da881ca006613012d918a01634c3e03d2e18d4bb7500e73af01feffffff95dad3356525eb738763660ccb6b4604ec6a4faeb7cc487dd96988348d383ee4000000004847304402200568c7b563b5211271e4fb5598df400d795549ca684d5b305b8251b204e3399402207eec1bdd76666e86cd042d72032bcc765078a5aa09410d76ebf7be6ed0d82b4f01feffffff812d5ada2ec27eeb3427dc79c841633a796292b675aa90d6b6a58f39c4fc2d420000000049483045022100bbb2f2548806c73ee794939a58031be8dee2edfd158c3d5d640670c82503cde8022019fb1d57a2619fa72097fb2bffd6bf952524797c0a7408b4ed203ac0d03f88e801feffffff71dc44fd1b120fc54eec52e36b5532e0764562f2e8bec63ac6fc607455f600ee0000000049483045022100905990ebf7929975f6e1c3cc9446586bf6f43fa6d7838d4141a20e0b8490a33c0220416153245707b2a468e8b7b858fa296433e4a487c23f54cc91def1a2b011884601feffffff59fbdb27c8c54d12429c7d3fba6e49c5d0f4a870ca3a341754860f1d7ecc38c00000000048473044022020844e2d050fb0e94ef9344f7b8ae38c000387e6c01a6f44c7e21a0a28fc8afd02204645e4f563b18c146ab4df1f3ccdeab2e41f6988fbf4e71cbb000237b599376401feffffffc817935c9adf79085587e97afa66442ac7303fec5091662834e74f2b0f0b2dfd000000004847304402203504f95462fe6114c1acc541b04c3f289144f43cf822b3855e22662da053507d02204959d54041aedd2a005d591a294eeae4b01f833cde4ccf41563cd267b2ec325f01feffffff33726d184862c607e2ed01c3f82481c3fed83310e3d285f9e23044e79f585edc0000000049483045022100b1bbc504aa05adcc704543c3957c33872e851252c021ef16d3addcd24173639402207ecffde550b07b28c5eacb47523c45b9b5094ef7fbf59cb7491e820d5adff47701feffffffe30e6e9f7f82b0de0554bd471fe8a1a04c31f7a77947c357f946043f50553c03000000004847304402200c9227ff383917a27af467d5300c0152628e4f11b33f352050f613b448e24027022018e44fd952c15ecb69493215567321a4441e0cee10af37340dcf8c412fe9298201feffffff655edd1c7fcf2eee5fe3a784cdf4d71b29ae101d98e7cf551734e40d29d507570000000048473044022036b924dd9d921dfcd9315e8fd1972d9e50d86c8d9c2d6a4226c169d8db990b38022067e20089e1201a927364daeccbfa05074ca9e5cbb2900250ee3aabb82f906a5d01feffffff08c00b6afdb573c6e55f04657ea5dcf79ba1835b81831e02efa0aa32d33282640000000049483045022100a43c1d8398a0908cfb07fbbd1a77d8d2482c297a1ec19e790c79cadb81f8caf60220772658d181f80f15ed50243f3433294d8aacf7b86585865b811ebdf21ff0745d01feffffffaaccfba5e45511f5d455b64f806ea10505ed790cee0ce8c67ad5feb14e16f4d60000000049483045022100894ea769e923922eed07999d1931aedd7a77b72f5970851eb5db99c688bbd7f3022034434f0b119408c3ea728cb3bd58e11fb255ccd5416738c82e4b0808ac45212601feffffff7bd6093ed5e26988e9a3a2e6407219964f44dd7970b7968c31797195c789f0900000000049483045022100c8b9de099bd9524beffc16afecbc0eb8003baa460b903060befb30fbca34086d02206c3ac5fdc2d0703ba77ea08b734a8e5a64b02946be12f81e05e3b9e48d92540401feffffff9cded2c4561de03f4bbb62636ea049a18f3c30f9a51e411004e33e41adfe6fc3000000004847304402203d46cb53de495ca0933eef4148f6e2c0f9451582ad14f73483ba9e2ba7570c7d02207d5d0058e87a93769d0932303f9205cc48056cb8eaca0b0d25355515ecd8027401feffffff0acfc677a6d9f77b5857056017f9daaeee450b7941becce7568d44b1d17ed40700000000484730440220032dd706a34be769df5bb76f03169864ea86b99072bcad331f9d8ba8a80c612302206bb90011a5910221a2b79dfa6d29c2a255404a1dd307f74c2b43e08311902fa801feffffff68b0aedd4253842b7516c8c49be569b5a2634ceff79a38a2ad1bb2f1a70e06d6000000004847304402203bd2b90a05891237492f8f6d1e9b91a09c458503897f193f2d9fec4375068297022023cda6a5943af0740f4e1b83ea39a419f5bd8f3e756a52f30c901a1d7b14a9f601feffffffd468ff69081c55f2d15b6ad885a154ad05421295098f41415641f5bd200d8c4f000000004847304402200a915597935a4da0237a96354ea4efefcc981a4cb9d8284cc4eb86c26e442a29022061acd73c2bf542da022b0d9135546582b91e0a1fa5e7f353f1c0cf51f1e750dd01feffffff9ea03210197b62c450cd725e7e6e162c925599d913dfd8850668d12f55880a30000000004948304502210097d291e87358b99e831c11778febd0b2f71527916a707a680ba588c91ec2bbdf02207e1fbd53c3058b7aab813c90457e0c8dcb8b46454ea913ae07db5977657bf1e101feffffffd16c772d44fff422f28f55dcb81ba6583af717ba71f7cc747c17d4af28cc9127000000004847304402200b9c95746481dd2767fa2ec8b3043e1c72ef7928442364789188d0e009c1108b02204e9f02fb14bd9463801797a0e8352351abd74e4231d4b7e8370f09318936171f01fefffffff66af09f12b0a0b52e5cb36f8d55a7958b4ca66da3e2faeadaaf90aef8c29cd400000000494830450221009ebbc9a43923561070f3913fcbfbdba05163757e1ca0121ea6b1d0f07a7facc802206e80984b3d41cabf94a12f9e95e43316f7b6aa1639c7c23d84395ba569df935a01feffffff9ad9805ac0ea304e7152a9cf3a0bc0723a3eaa43c17cc379b855d65ce765ca58000000004847304402207a8f9a562b56124066a2536e7680c2ba7a4df7385fd8b0dbbcfa864792b3863b02201613f2163258392bd8494f623af022c98e816c09252470930025273fe52441dd01feffffff5da965ab613a42c00437e137fe0d125016af032fcb4e7f3241f66983bff409080000000048473044022026250e31d427f323a8b5c595f00fd71c46090b8d2166633252b54c40d43cef6a022004287de864f79cab1e15b826209889349865859fb131bfe97fb7481e21856fe101feffffffc52746b5166fa3f4ebaf785befd618b084859e8186f7cd6db42fec03fa872435000000004847304402204dad7f6ceab776d84d99422dec78e0b914fdb2971d39b1f64efa708976139c1e022043d7b2944ebb4d283815f7f87b1204a7e4e5622a0a568309bc040fc465cecfa101feffffff5aea286e4041bee29d6a884654a0f63c9d972a22e999d4a537bc287842d1ce720000000048473044022019705311d582a7255461da67a2687b89e241ebf7c9a93e06c1af9e071242d923022046cce321507df01c1732c830b3bdba100ac9b5d3d25281ede05482a6e3fb1f7501feffffff0df5d0a5051d046d4519c94f95bd91d69699341dd303bd02894d1cd7b74076ea0000000049483045022100fe79c25c7b16fef06b41d1bad1289718172ef9cb898058509dc202ada0d0259502201f27854eb0c997f7c705b910da8139b5ac9de1ce0130a552416070be80c8770301feffffff567e0cb9d71baae8a09a793e90045bf1a0ba1279488cf119a8869f401038dbf7000000004847304402200d574888749bdd8b887fd8491ddbfba9dc040bf25155032715420da4e2127b7e02201e99935b3fbf7c1e9504cf1280767d9616b69853b5c873073e7d76af3c417eb601fefffffff1b515c224e63a48dce2adbcfa80871fbf01760172e3e09505bc57608179bc07000000004847304402200ee73d9e7a1b0e23a51fc2e685b339ec72c2fa41e72c71915092e0452bb1561502205d13b873d3bb2e14e1919463d287796685a217ff4c7e275ab5f041d28fdbdbf201feffffff1ac6e90fc7f604e432197a50b0163ab68edf62f84846c14bca078dad8273307c0000000049483045022100ed60d64ea07f4efc7710b6699a1056bdd0e0451cc6da30f0fcdd3b88d86c32270220661aef3094da899ed45a38901578acdcd2def80c8a3db598fefd1a04880ce08801feffffff2e96a46ef2bd690066617da9a8783e9830067155a9baec28f144c026775dc1f100000000494830450221009d3b45a746886a5d9a91ca6a050917dde32093bcaa423743f2ba51f4adad1b7602202769b786e545b7eb10bed620d154cf0b47aec48ca57834ea7219ba1606574f4f01feffffff695b2f43d88fd6b056632f965e88668d8fc3394df33ede58fd89e72236409ddd0000000049483045022100ffe93026d7fe196a75102df699fa035e348020fc8390edfe5491cacb1c841c4b0220533882006e4de97b124081361c193d44d8e9ce1f3903cb824f2858c3d40b28df01feffffffb04e107390563f085f6809505309bf45b4ef074cbd5aef0230c5a295841ef573000000004847304402200bdd5e660d8c50e6d49cd4089df69c88cde04b0cc2ac5867ec1af0c264bd6441022075eab928ded8316e7af225861d5060a98defc39afc83e6b594c057715be2ac6801feffffffa736cf2458d167237f2b41edae982da0b1e70106c267302133c5bc4c15aea8400000000048473044022058e75cb39fe6c65bc72b967d2d9fe78eede884bcd18d7f70599a5270d467a8b802207b2c2156e8786c303aab952a644b400079c05df754b30a47df66ab70fdbc39cb01feffffff717b6b377f90b170b72eb65fefb7f251f895f1b2e6e1d7e839cabd5f553677500000000049483045022100ed1e6bcf10b2b7261886c010ae4d4ae1142b18538384b290647b261fec7d5c2402207a39b362409c1494aefa691ae7cfc61bb31827e38504e3c679122e29eab6604001feffffff97b7863aed045333d3a4ae6118ea7329a99cee17f4be571e7f6385e6f1640a5f0000000049483045022100c1690caaed7b720395ea226799899faab77783b38966d0470fa891683b649f5c0220555ac95ebe47e58486ca5a0a5cda6f7b6319607c8ea52235586901adb51b0c3b01feffffff79dff964a7a39bfdb7cfb647086b36ec062569cf372e093d3456ba0188a396f3000000004847304402205525d3338edd71a279bd00c4f90d681df8bf8f5bf48a7fde14d8dadbb4c130ed0220208f646761bc48e9c7f30ec840e08df889738c590acbb91221af74b0d8d1127801feffffffa2470935e629f8c904995b78251a795413a88b6c01180faf361780bd949a03de000000004847304402202b507f3b534de778644386a85578ccef6ffac544cce9fe7e48ddd191e2af2f7a0220653f9ec3191c0c365707e23a4b4929c36a7a66a2342031d7c7a9a220357083bb01feffffffce965dc82394af3a79f2085c2f4b23f32a3e640dd7cba3cb52b3928dfa93da200000000048473044022002009be49cd2aee287ce5f1e8e55c038860f347f5ae9f137b5e820bbafe1c6d802205c5801f6e5495631bbe1a94c30bac3b02035effa570a09d3fe64bef9fe8b295d01feffffff8cacf61f8e579bec02a0dce4c64fe339035030204166913cf09e087a7e5764d0000000004847304402205884129ad78f33db6197d60e7e62be1ebac99d5d11117a0a9c07edb78c34a74f02205643b9d374ae5e8dd8f3439633b1bc340a3a2749816cb4f563ab88b7dd63f10e01feffffff9edd90adf1e3ee5241869b069cd7c1a6a717520af679802ab57b391632aa8d5a0000000049483045022100dde670949c7bdd7b5c39d510aacdbac372b475be5c72ff2b9ba0871b0d671ab702206ad548e170295d1829444cf7a15772a39daba449af86349e3a83f7ac97bb560b01fefffffff6afd548061b8d9ff905f0b605b87290b38beb7e27c0399f7a58412d48377cfc000000004847304402206b93673e15c11425512235c472ded2566221d0e8106e584c630b08b17d25457a02205b697f119358540141865f24bef32bdc51f06c89a920f8f253c1e61562b79b1901feffffff3b6a1440e9beee892a96ab5046cfd2889a4036a22d5874c1367783d77c0553760000000049483045022100e8ce77a95dacaaf5d30352e10061caa96c85a41c020dd4d4371f574047dc72d202203298f462255f8bf32cb3a40e5a3e08d5af7a50c9f9c3fa50ea87ce00e201848a01feffffff6a4630ad76119baac2a7c82d9c5115c4f58fc9c9b395867ae3145f84d00489a60000000049483045022100ffff8768c147e9b230db03247f30fbc908d58f3fe2d60633ca5afaa48c6a4d1f022077bc02f319ddd9ffc4fd9479121d802eb52062bfda6db28f6167cc4b57cffc1801feffffff2fad9d64925443b77ea62dfde38321a0f905fde3ba775c1e8439a15493073f15000000004847304402205eca57caba7e82d10ac9826309301306c1727e7876bfc6e5d0dbad36ad415f8a022026b7acd46c0ce72abd8862313803851e0bf210b081c4386ebc0b52067314694801feffffffea7ee2cba93405a8c9328689294955494f3bfffa709c65f88f79372f528a48100000000049483045022100e5ef084960ceaea6533c1a56fa5aeb066b8c3ca2340fedd5bc4d4dca42fc094802202dc12461e5eb20d898122a00fadfb09ec36af77eaa3c8351d75f80d345fe309401feffffffa7d438d16cf40eb3803c495811d1b6e26fa257c960ba7ba0ba7a5e5b71261a8d00000000484730440220606197a178b2a7298a5dc7fca74959b13d4722d914fb5bb20d80291350de068a022067049fd5e86e2c1bbb789fd183d71749eba4754cbdbb850c1dec0fa15497389b01feffffff28137f475825f4244c7225cc59150bd50205dfb11487724e800608ec119910d500000000494830450221009c065ba511e5f340f533ebe759a3d04210bd142c2fc3f4f55a47eeb9434f8f4702206119de29c1b4bba70652f2dd218da44a7fceeee50b969d6a1d72e18562b2a19001feffffffeee3118b8c0c15fa65c77fa940df6ef587cb17a703d0f90d81f07ac741a69c900000000049483045022100e94cf3dea5d2e609f6c611c6658c19edb48ecd5ab93b6373915d9a8490faba420220375bd6fcf5d2b88cdd078a49ca7370a5af0b86e854d16a4b66dc9addfa730e1b01feffffff46cbd2e9e175171d636a0a223b8097c06b6665b3eb225ce5a1185c2c6c324281000000004847304402202ef4acc0d25cdfbc371a9b8801e5134ee7cbf7833ecabee7832d952a13c89f3902207b61a25c06818bac92e05b13341853d1903a29ed9cf9040a1e40bd991bd1a1dc01feffffff584961feb9095cb44090b17e547b544ee2c45c1fb01f5550fec7af801aa6d1790000000049483045022100edf1e6d11d1bd3a0bbaf73f71dc570372cd214cdbc84ea6eef1c96578ec7fb4d02203a9ceaa5189ec8849642621d5aec48363db20ead0a611cf94ce22400b47095ee01feffffff02a04a7c02000000001976a91478a7602aa3bd71b1a5e777d15a1f8718d50d658388ac8c850f00000000001976a91433e9e62985787e31b7ea0e50050f36b3c676e13c88ac01060000',
        'amount': 0.41700000,  # value of the selected output, in coin units
        'vout': 0,
        'confirmations': 11,
        'scriptPubKey': '76a91478a7602aa3bd71b1a5e777d15a1f8718d50d658388ac',
        'address': 'Ukn3L4dgG13R3dSdxLvAAJizeiaW7cyUFz'
    }]
    # Build two sends from/to the same address, both forced onto the same
    # custom input list.
    tx1hex = construct_tx(server_db, "Ukn3L4dgG13R3dSdxLvAAJizeiaW7cyUFz", "Ukn3L4dgG13R3dSdxLvAAJizeiaW7cyUFz", custom_inputs=custom_inputs)
    tx2hex = construct_tx(server_db, "Ukn3L4dgG13R3dSdxLvAAJizeiaW7cyUFz", "Ukn3L4dgG13R3dSdxLvAAJizeiaW7cyUFz", custom_inputs=custom_inputs)
    # Deserialize both raw transactions to inspect their inputs.
    tx1f = BytesIO(binascii.unhexlify(tx1hex))
    tx1 = bitcoin.core.CTransaction.stream_deserialize(tx1f)
    tx2f = BytesIO(binascii.unhexlify(tx2hex))
    tx2 = bitcoin.core.CTransaction.stream_deserialize(tx2f)
    # Both transactions must spend the identical first outpoint (txid, vout).
    assert (tx1.vin[0].prevout.hash, tx1.vin[0].prevout.n) == (tx2.vin[0].prevout.hash, tx2.vin[0].prevout.n)
def test_disable_utxolocks(server_db):
    """With `disable_utxo_locks=True` it should use the same UTXO.

    Fix: this text was originally placed after the first statement of the
    function, which makes it a no-op bare string expression rather than the
    function's docstring (PEP 257: the docstring must be the first statement).
    """
    transaction.initialise()  # reset UTXO_LOCKS

    # With UTXO locking disabled, consecutive constructions are free to pick
    # the same unspent output.
    tx1hex = construct_tx(server_db, "Ukn3L4dgG13R3dSdxLvAAJizeiaW7cyUFz", "Ukn3L4dgG13R3dSdxLvAAJizeiaW7cyUFz", disable_utxo_locks=True)
    tx2hex = construct_tx(server_db, "Ukn3L4dgG13R3dSdxLvAAJizeiaW7cyUFz", "Ukn3L4dgG13R3dSdxLvAAJizeiaW7cyUFz", disable_utxo_locks=True)

    # Deserialize both raw transactions so their inputs can be inspected.
    tx1 = bitcoin.core.CTransaction.stream_deserialize(BytesIO(binascii.unhexlify(tx1hex)))
    tx2 = bitcoin.core.CTransaction.stream_deserialize(BytesIO(binascii.unhexlify(tx2hex)))

    # Both transactions should spend the same first outpoint (txid, vout).
    assert (tx1.vin[0].prevout.hash, tx1.vin[0].prevout.n) == (tx2.vin[0].prevout.hash, tx2.vin[0].prevout.n)
| 369.481928
| 27,318
| 0.973848
| 384
| 30,667
| 77.598958
| 0.273438
| 0.001611
| 0.00443
| 0.003826
| 0.052655
| 0.052655
| 0.051715
| 0.04799
| 0.042922
| 0.042922
| 0
| 0.599993
| 0.014185
| 30,667
| 82
| 27,319
| 373.987805
| 0.385651
| 0.004174
| 0
| 0.320755
| 0
| 0
| 0.921779
| 0.920003
| 0
| 1
| 0
| 0
| 0.056604
| 1
| 0.075472
| false
| 0
| 0.169811
| 0
| 0.264151
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2dba028f15a26355a03fde0270efee77b963fbf2
| 739
|
py
|
Python
|
chap3/3-6.py
|
StewedChickenwithStats/Answers-to-Python-Crash-Course
|
9ffbe02abba5d111f702d920db7932303daf59d4
|
[
"MIT"
] | 1
|
2022-02-21T07:05:48.000Z
|
2022-02-21T07:05:48.000Z
|
chap3/3-6.py
|
StewedChickenwithStats/Answers-to-Python-Crash-Course
|
9ffbe02abba5d111f702d920db7932303daf59d4
|
[
"MIT"
] | null | null | null |
chap3/3-6.py
|
StewedChickenwithStats/Answers-to-Python-Crash-Course
|
9ffbe02abba5d111f702d920db7932303daf59d4
|
[
"MIT"
] | null | null | null |
# Build the dinner guest list, announce the bigger table, and print one
# invitation per guest.
people = ['mom', 'dad', 'sister']
print("Now there will be a larger dining-table.")
people.insert(0, 'brother')  # add a guest at the front of the list
people.insert(2, 'friend')   # add a guest in the middle of the list
people.append('teacher')     # add a guest at the end of the list

# Fix: the original repeated the same print statement six times with
# hard-coded indices people[0]..people[5]; a loop over the list produces the
# identical output and keeps working if the guest list changes size.
for person in people:
    print("Dear " + person + ", I'd like to invite you to have dinner with me on Friday at my home.")
| 67.181818
| 96
| 0.696888
| 144
| 739
| 3.576389
| 0.277778
| 0.104854
| 0.174757
| 0.093204
| 0.739806
| 0.739806
| 0.739806
| 0.739806
| 0.739806
| 0.739806
| 0
| 0.012759
| 0.151556
| 739
| 11
| 97
| 67.181818
| 0.808612
| 0
| 0
| 0
| 0
| 0
| 0.697297
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.636364
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
931d107109b34c70d04ad0ab51c5390610f0e179
| 153
|
py
|
Python
|
llvm/utils/lit/lit/LitFormats.py
|
clairechingching/ScaffCC
|
737ae90f85d9fe79819d66219747d27efa4fa5b9
|
[
"BSD-2-Clause"
] | 2,151
|
2020-04-18T07:31:17.000Z
|
2022-03-31T08:39:18.000Z
|
llvm/utils/lit/lit/LitFormats.py
|
clairechingching/ScaffCC
|
737ae90f85d9fe79819d66219747d27efa4fa5b9
|
[
"BSD-2-Clause"
] | 395
|
2020-04-18T08:22:18.000Z
|
2021-12-08T13:04:49.000Z
|
llvm/utils/lit/lit/LitFormats.py
|
clairechingching/ScaffCC
|
737ae90f85d9fe79819d66219747d27efa4fa5b9
|
[
"BSD-2-Clause"
] | 338
|
2020-04-18T08:03:10.000Z
|
2022-03-29T12:33:22.000Z
|
from TestFormats import FileBasedTest
from TestFormats import GoogleTest, ShTest, TclTest
from TestFormats import SyntaxCheckTest, OneCommandPerFileTest
| 38.25
| 62
| 0.882353
| 15
| 153
| 9
| 0.6
| 0.333333
| 0.466667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098039
| 153
| 3
| 63
| 51
| 0.978261
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
93245be5d708a2647fdae572c9b153da1c48df99
| 4,783
|
py
|
Python
|
tests/python/test_ringbuf.py
|
yzhao1012/bcc
|
15340c44b98d8ee97a6dce775de614fd268cee13
|
[
"Apache-2.0"
] | 7
|
2020-08-24T01:56:06.000Z
|
2022-02-26T15:49:44.000Z
|
tests/python/test_ringbuf.py
|
yzhao1012/bcc
|
15340c44b98d8ee97a6dce775de614fd268cee13
|
[
"Apache-2.0"
] | 9
|
2021-07-29T21:15:28.000Z
|
2022-02-16T18:17:49.000Z
|
tests/python/test_ringbuf.py
|
yzhao1012/bcc
|
15340c44b98d8ee97a6dce775de614fd268cee13
|
[
"Apache-2.0"
] | 8
|
2019-01-25T21:48:34.000Z
|
2022-03-15T16:21:50.000Z
|
#!/usr/bin/env python
# Copyright (c) PLUMgrid, Inc.
# Licensed under the Apache License, Version 2.0 (the "License")
from bcc import BPF
import os
import ctypes as ct
import random
import time
import subprocess
from unittest import main, TestCase, skipUnless
from utils import kernel_version_ge
class TestRingbuf(TestCase):
    """Exercise BPF ring buffer maps (BPF_RINGBUF_OUTPUT) through the
    one-shot output API and the two-step reserve/submit and
    reserve/discard APIs.

    The original file repeated ~40 lines of identical load/attach/
    trigger/drain boilerplate in each of the four tests; that shared
    logic now lives in :meth:`_run` and the BPF programs in class
    constants, so each test states only what varies.
    """

    # BPF program: every (clock_)nanosleep syscall pushes one
    # timestamped record via the one-shot ringbuf_output() API.
    BPF_OUTPUT = """
BPF_RINGBUF_OUTPUT(events, 8);
struct data_t {
    u64 ts;
};
int do_sys_nanosleep(void *ctx) {
    struct data_t data = {bpf_ktime_get_ns()};
    events.ringbuf_output(&data, sizeof(data), 0);
    return 0;
}
"""

    # Variant using the two-step reserve()/submit() API; the record
    # reaches the consumer just like with ringbuf_output().
    BPF_SUBMIT = """
BPF_RINGBUF_OUTPUT(events, 8);
struct data_t {
    u64 ts;
};
int do_sys_nanosleep(void *ctx) {
    struct data_t *data = events.ringbuf_reserve(sizeof(struct data_t));
    if (!data)
        return 1;
    data->ts = bpf_ktime_get_ns();
    events.ringbuf_submit(data, 0);
    return 0;
}
"""

    # Variant that reserves a record but discards it, so the consumer
    # callback must never fire.
    BPF_DISCARD = """
BPF_RINGBUF_OUTPUT(events, 8);
struct data_t {
    u64 ts;
};
int do_sys_nanosleep(void *ctx) {
    struct data_t *data = events.ringbuf_reserve(sizeof(struct data_t));
    if (!data)
        return 1;
    data->ts = bpf_ktime_get_ns();
    events.ringbuf_discard(data, 0);
    return 0;
}
"""

    def _run(self, text, drain):
        """Load *text*, attach it to the nanosleep syscalls, trigger a
        short sleep, then drain the ring buffer.

        :param text: BPF C program defining an ``events`` ring buffer
            and a ``do_sys_nanosleep`` probe function.
        :param drain: name of the drain method on the BPF object,
            ``"ring_buffer_poll"`` or ``"ring_buffer_consume"``.
        :return: number of times the consumer callback was invoked.
        """
        self.counter = 0

        class Data(ct.Structure):
            _fields_ = [("ts", ct.c_ulonglong)]

        def cb(ctx, data, size):
            # Each record must be exactly one Data struct.
            self.assertEqual(size, ct.sizeof(Data))
            event = ct.cast(data, ct.POINTER(Data)).contents
            self.counter += 1

        b = BPF(text=text)
        # Both nanosleep entry points exist depending on libc/kernel;
        # attach to both so `sleep` reliably triggers the probe.
        b.attach_kprobe(event=b.get_syscall_fnname("nanosleep"),
                        fn_name="do_sys_nanosleep")
        b.attach_kprobe(event=b.get_syscall_fnname("clock_nanosleep"),
                        fn_name="do_sys_nanosleep")
        b["events"].open_ring_buffer(cb)
        subprocess.call(['sleep', '0.1'])
        getattr(b, drain)()
        count = self.counter
        b.cleanup()
        return count

    @skipUnless(kernel_version_ge(5, 8), "requires kernel >= 5.8")
    def test_ringbuf_output(self):
        # ringbuf_output() records are delivered by ring_buffer_poll().
        self.assertGreater(self._run(self.BPF_OUTPUT, "ring_buffer_poll"), 0)

    @skipUnless(kernel_version_ge(5, 8), "requires kernel >= 5.8")
    def test_ringbuf_consume(self):
        # ring_buffer_consume() drains the same records without polling.
        self.assertGreater(self._run(self.BPF_OUTPUT, "ring_buffer_consume"), 0)

    @skipUnless(kernel_version_ge(5, 8), "requires kernel >= 5.8")
    def test_ringbuf_submit(self):
        # reserve() + submit() must deliver records like ringbuf_output().
        self.assertGreater(self._run(self.BPF_SUBMIT, "ring_buffer_poll"), 0)

    @skipUnless(kernel_version_ge(5, 8), "requires kernel >= 5.8")
    def test_ringbuf_discard(self):
        # reserve() + discard() must deliver nothing to the consumer.
        self.assertEqual(self._run(self.BPF_DISCARD, "ring_buffer_poll"), 0)
# Run the test suite when this file is executed directly.
if __name__ == "__main__":
    main()
| 30.081761
| 72
| 0.602133
| 627
| 4,783
| 4.354067
| 0.151515
| 0.048352
| 0.061538
| 0.052747
| 0.88022
| 0.872894
| 0.872894
| 0.872894
| 0.872894
| 0.872894
| 0
| 0.017041
| 0.263851
| 4,783
| 158
| 73
| 30.272152
| 0.758307
| 0.023416
| 0
| 0.832117
| 0
| 0
| 0.297129
| 0.088475
| 0
| 0
| 0
| 0
| 0.058394
| 1
| 0.058394
| false
| 0
| 0.058394
| 0
| 0.19708
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
93453eddb88a7ed666a9350a363bc531cfe4670a
| 69,932
|
py
|
Python
|
infoblox_netmri/api/broker/v3_8_0/device_environment_monitor_broker.py
|
infobloxopen/infoblox_netmri
|
aa1c744df7e439dbe163bb9edd165e4e85a9771b
|
[
"Apache-2.0"
] | 12
|
2016-02-19T12:37:54.000Z
|
2022-03-04T20:11:08.000Z
|
infoblox_netmri/api/broker/v3_8_0/device_environment_monitor_broker.py
|
azinfoblox/infoblox-netmri
|
02372c5231e2677ab6299cb659a73c9a41b4b0f4
|
[
"Apache-2.0"
] | 18
|
2015-11-12T18:37:00.000Z
|
2021-05-19T07:59:55.000Z
|
infoblox_netmri/api/broker/v3_8_0/device_environment_monitor_broker.py
|
azinfoblox/infoblox-netmri
|
02372c5231e2677ab6299cb659a73c9a41b4b0f4
|
[
"Apache-2.0"
] | 18
|
2016-01-07T12:04:34.000Z
|
2022-03-31T11:05:41.000Z
|
from ..broker import Broker
class DeviceEnvironmentMonitorBroker(Broker):
controller = "device_environment_monitors"
def index(self, **kwargs):
    """List the available device environment monitors.

    Any of the documented index inputs (``DevEnvMonID``, ``DeviceID``,
    ``DeviceGroupID``, ``timestamp``, ``methods``, ``include``, and the
    ``start``/``limit``/``sort``/``dir``/``select`` paging and shaping
    options, plus NIOS ``goto_field``/``goto_value``) may be supplied to
    narrow the list; unrecognized inputs are ignored. Of the list-style
    queries (index/search/find), this is the most efficient.

    :param kwargs: Input field values forwarded to the NetMRI "index"
        endpoint for this controller.
    :return device_environment_monitors: Array of the
        DeviceEnvironmentMonitor objects matching the input criteria.
    """
    # Resolve the fully-qualified API method name once, then issue the
    # list-style request.
    method_name = self._get_method_fullname("index")
    return self.api_list_request(method_name, kwargs)
def show(self, **kwargs):
    """Show the details for one device environment monitor.

    Requires ``DevEnvMonID`` (the internal NetMRI identifier of the
    Device Environment record); optional ``methods`` and ``include``
    inputs attach extra method results or associated objects
    (available: ``device``) to the output.

    :param kwargs: Input field values forwarded to the NetMRI "show"
        endpoint for this controller.
    :return device_environment_monitor: The DeviceEnvironmentMonitor
        identified by the specified DevEnvMonID.
    """
    # Single-record request, as opposed to the list request used by
    # index/search.
    method_name = self._get_method_fullname("show")
    return self.api_request(method_name, kwargs)
def search(self, **kwargs):
    """List device environment monitors matching flexible search criteria.

    Accepts the same inputs as :meth:`index` plus per-field filters on
    every DeviceEnvironmentMonitor attribute (DataSourceID,
    DevEnvMonChangedCols, DevEnvMonDescr, DevEnvMonEndTime,
    DevEnvMonHighShutdown, DevEnvMonHighWarnVal, DevEnvMonID,
    DevEnvMonIndex, DevEnvMonLowShutdown, DevEnvMonLowWarnVal,
    DevEnvMonMeasure, DevEnvMonStartTime, DevEnvMonState,
    DevEnvMonStatus, DevEnvMonStatusAlert, DevEnvMonStatusMessage,
    DevEnvMonTimestamp, DevEnvMonType, DeviceID), an optional ``query``
    containment / ``/regex/`` string matched against those attributes,
    and an optional ``xml_filter`` SetFilter structure applied after the
    query but before paging. More flexible than :meth:`index`, but more
    demanding on the system and slower.

    :param kwargs: Input field values forwarded to the NetMRI "search"
        endpoint for this controller.
    :return device_environment_monitors: Array of the
        DeviceEnvironmentMonitor objects matching the input criteria.
    """
    # Same list-request plumbing as index(); only the endpoint differs.
    method_name = self._get_method_fullname("search")
    return self.api_list_request(method_name, kwargs)
def find(self, **kwargs):
    """List device environment monitors matching a flexible search specification.

    This is the most flexible of the query mechanisms, supporting comparison
    operations other than equality; however it is more complex to use and
    will not perform as efficiently as the index or search methods.

    In the input descriptions below, 'field names' refers to the following
    fields: DataSourceID, DevEnvMonChangedCols, DevEnvMonDescr,
    DevEnvMonEndTime, DevEnvMonHighShutdown, DevEnvMonHighWarnVal,
    DevEnvMonID, DevEnvMonIndex, DevEnvMonLowShutdown, DevEnvMonLowWarnVal,
    DevEnvMonMeasure, DevEnvMonStartTime, DevEnvMonState, DevEnvMonStatus,
    DevEnvMonStatusAlert, DevEnvMonStatusMessage, DevEnvMonTimestamp,
    DevEnvMonType, DeviceID.

    **Inputs** (all optional, passed via ``**kwargs``)

    For every field name F listed above, three inputs are recognized:

    :param op_F: The operator to apply to field F. Valid values are: =, <>,
        rlike, not rlike, >, >=, <, <=, like, not like, is null, is not
        null, between. For the between operator the value will be treated
        as an Array if a comma-delimited string is passed, and it must
        contain an even number of values.
    :type op_F: String
    :param val_f_F: If op_F is specified, the field named in this input is
        compared to the value in F using the specified operator; that is,
        the value is treated as another field name rather than a constant.
        Either this or val_c_F must be specified if op_F is specified.
    :type val_f_F: String
    :param val_c_F: If op_F is specified, this explicit constant value is
        compared to the value in F using the specified operator. Either
        this or val_f_F must be specified if op_F is specified.
    :type val_c_F: String

    Additional inputs:

    :param DeviceGroupID: The internal NetMRI identifier of the device
        groups to which to limit the results.
    :type DeviceGroupID: Array of Integer
    :param timestamp: The data returned will represent the device
        environment monitors as of this date and time; if omitted, the
        most recently collected data is used.
    :type timestamp: DateTime
    :param methods: Methods called on each returned monitor and included in
        the output. Available methods are: device.
    :type methods: Array of String
    :param include: Associated object types to include in the output,
        returned as outputs named after the association. Available
        includes are: device.
    :type include: Array of String
    :param start: The record number to return in the selected page of data
        (``default:`` 0). It will always appear, although it may not be
        the first record; see ``limit``.
    :type start: Integer
    :param limit: The size of the page of data, i.e. the maximum number of
        records returned (``default:`` 1000, maximum 10000). With a limit
        of 10 and a start of 10, records 10-19 are returned.
    :type limit: Integer
    :param sort: The data field(s) to use for sorting the output
        (``default:`` DevEnvMonID). Any of the field names listed above is
        valid.
    :type sort: Array of String
    :param dir: The direction(s) in which to sort the data; valid values
        are 'asc' and 'desc' (``default:`` asc).
    :type dir: Array of String
    :param select: The attributes to return for each
        DeviceEnvironmentMonitor; if empty or omitted, all attributes are
        returned. Any of the field names listed above is valid.
    :type select: Array
    :param goto_field: The field name for NIOS GOTO that is used for
        locating a row position of records (``api version min:`` 2.8).
    :type goto_field: String
    :param goto_value: The value of goto_field for NIOS GOTO that is used
        for locating a row position of records.
    :type goto_value: String
    :param xml_filter: A SetFilter XML structure to further refine the
        search, applied AFTER any search query or field values but before
        any limit options; may be costly and inefficient if not associated
        with database filtering.
    :type xml_filter: String

    **Outputs**

    :return device_environment_monitors: An array of the
        DeviceEnvironmentMonitor objects that match the specified input
        criteria.
    :rtype device_environment_monitors: Array of DeviceEnvironmentMonitor
    """
    # All filtering semantics are applied server-side; the raw kwargs are
    # forwarded unchanged to the generic list-request machinery.
    method_name = self._get_method_fullname("find")
    return self.api_list_request(method_name, kwargs)
def data_source(self, **kwargs):
"""The collector NetMRI that collected this data record.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param DevEnvMonID: The internal NetMRI identifier of Device Environment.
:type DevEnvMonID: Integer
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return : The collector NetMRI that collected this data record.
:rtype : DataSource
"""
return self.api_request(self._get_method_fullname("data_source"), kwargs)
def device(self, **kwargs):
"""The device from which this data was collected.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param DevEnvMonID: The internal NetMRI identifier of Device Environment.
:type DevEnvMonID: Integer
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return : The device from which this data was collected.
:rtype : Device
"""
return self.api_request(self._get_method_fullname("device"), kwargs)
def infradevice(self, **kwargs):
"""The device from which this data was collected.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param DevEnvMonID: The internal NetMRI identifier of Device Environment.
:type DevEnvMonID: Integer
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return : The device from which this data was collected.
:rtype : InfraDevice
"""
return self.api_request(self._get_method_fullname("infradevice"), kwargs)
| 55.194949
| 773
| 0.62598
| 8,124
| 69,932
| 5.337765
| 0.037543
| 0.068259
| 0.044369
| 0.073056
| 0.95148
| 0.950604
| 0.918412
| 0.906143
| 0.896573
| 0.893921
| 0
| 0.004155
| 0.294457
| 69,932
| 1,266
| 774
| 55.238547
| 0.874726
| 0.829177
| 0
| 0
| 0
| 0
| 0.074899
| 0.027328
| 0
| 0
| 0
| 0
| 0
| 1
| 0.411765
| false
| 0
| 0.058824
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
fa79e9dcf40af0a83a9acd7897863e5ff55fe079
| 1,169
|
py
|
Python
|
python/hetu/gpu_links/SamMaxLink.py
|
codecaution/Hetu
|
e278732c2fe3554c8d576585f5bcbf79ade31b68
|
[
"Apache-2.0"
] | null | null | null |
python/hetu/gpu_links/SamMaxLink.py
|
codecaution/Hetu
|
e278732c2fe3554c8d576585f5bcbf79ade31b68
|
[
"Apache-2.0"
] | null | null | null |
python/hetu/gpu_links/SamMaxLink.py
|
codecaution/Hetu
|
e278732c2fe3554c8d576585f5bcbf79ade31b68
|
[
"Apache-2.0"
] | 3
|
2021-11-29T13:47:48.000Z
|
2022-03-03T02:00:43.000Z
|
from __future__ import absolute_import
import ctypes
from .._base import _LIB
from .. import ndarray as _nd
def sammax_link(in_mat, top1_group, topk_indice, out_mat, num_local_gpus, stream=None):
    """Dispatch the DLGpuSamMax kernel over the given device arrays.

    All array arguments must be hetu NDArrays; ``out_mat`` receives the
    result in place. ``stream`` is an optional stream object whose handle
    is forwarded to the kernel (None runs on the default stream).
    """
    for array in (in_mat, top1_group, topk_indice, out_mat):
        assert isinstance(array, _nd.NDArray)
    stream_handle = stream.handle if stream else None
    _LIB.DLGpuSamMax(
        in_mat.handle,
        top1_group.handle,
        topk_indice.handle,
        out_mat.handle,
        ctypes.c_int(num_local_gpus),
        stream_handle,
    )
def sammax_grad_link(output_grad, in_mat, top1_group, topk_indice, out_mat, num_local_gpus, stream=None):
    """Dispatch the DLGpuSamMaxGrad kernel (backward pass of sammax_link).

    All array arguments must be hetu NDArrays; ``out_mat`` receives the
    gradient in place. ``stream`` is an optional stream object whose handle
    is forwarded to the kernel (None runs on the default stream).
    """
    for array in (output_grad, in_mat, top1_group, topk_indice, out_mat):
        assert isinstance(array, _nd.NDArray)
    stream_handle = stream.handle if stream else None
    _LIB.DLGpuSamMaxGrad(
        output_grad.handle,
        in_mat.handle,
        top1_group.handle,
        topk_indice.handle,
        out_mat.handle,
        ctypes.c_int(num_local_gpus),
        stream_handle,
    )
| 46.76
| 162
| 0.71343
| 160
| 1,169
| 4.84375
| 0.225
| 0.185806
| 0.135484
| 0.225806
| 0.766452
| 0.766452
| 0.766452
| 0.766452
| 0.766452
| 0.766452
| 0
| 0.006472
| 0.207015
| 1,169
| 24
| 163
| 48.708333
| 0.829558
| 0
| 0
| 0.421053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.473684
| 1
| 0.105263
| false
| 0
| 0.210526
| 0
| 0.315789
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
35707622aa43bdf303238809cc191637a145ceca
| 4,056
|
py
|
Python
|
chord_rec/models/seq2seq/Seq2Seq.py
|
TianxueHu/ChordSymbolRec
|
d64a5be4f4914e6f682cb6d4079d7ba8a6fc2eac
|
[
"Unlicense",
"MIT"
] | null | null | null |
chord_rec/models/seq2seq/Seq2Seq.py
|
TianxueHu/ChordSymbolRec
|
d64a5be4f4914e6f682cb6d4079d7ba8a6fc2eac
|
[
"Unlicense",
"MIT"
] | null | null | null |
chord_rec/models/seq2seq/Seq2Seq.py
|
TianxueHu/ChordSymbolRec
|
d64a5be4f4914e6f682cb6d4079d7ba8a6fc2eac
|
[
"Unlicense",
"MIT"
] | null | null | null |
import random
import torch
import torch.nn as nn
import torch.optim as optim
class BaseSeq2Seq(nn.Module):
    """The Sequence to Sequence model (no attention).

    Wires an encoder and a decoder together and unrolls the decoder one
    time step at a time, optionally feeding ground-truth tokens back in
    (teacher forcing).
    """

    def __init__(self, encoder, decoder, device):
        super().__init__()
        self.device = device
        self.encoder = encoder.to(device)
        self.decoder = decoder.to(device)
        # The decoder consumes the encoder's hidden state directly, so the
        # two modules must agree on hidden size and depth.
        assert self.encoder.encoder_hidden_size == self.decoder.decoder_hidden_size, \
            "Hidden dimensions of encoder and decoder must be equal!"
        assert self.encoder.n_layers == self.decoder.n_layers, \
            "Encoder and decoder must have equal number of layers!"

    def forward(self, source, target, out_seq_len=None, teacher_forcing=True, start_idx=None):
        """The forward pass of the Seq2Seq model.

        Args:
            source (tensor): sequences in source language of shape
                (batch_size, seq_len, input_size)
            target (tensor): (batch_size, seq_len) token indices; column 0
                must hold the <sos> token.
            out_seq_len (int): the maximum length of the output sequence.
                If None, the length is determined by the input sequences.
            teacher_forcing (bool): feed ground-truth tokens to the decoder
                when True, otherwise feed back its own argmax predictions.
            start_idx: fill value for the output buffer (defaults to 0).

        Returns:
            tensor of shape (batch_size, seq_len, decoder.output_size).
        """
        batch_size = source.shape[0]
        # BUG FIX: seq_len was previously assigned only when out_seq_len was
        # None, so passing an explicit out_seq_len raised a NameError.
        seq_len = source.shape[1] if out_seq_len is None else out_seq_len
        if start_idx is None:
            start_idx = 0
        outputs = torch.full((batch_size, seq_len, self.decoder.output_size),
                             start_idx, dtype=torch.float).to(self.device)
        encoder_outputs, hidden = self.encoder(source)
        # First input to the decoder is the <sos> token.
        decoder_input = target[:, 0].unsqueeze(1)
        for t in range(1, seq_len):
            output, hidden = self.decoder(decoder_input, hidden)
            outputs[:, t, :] = output
            if teacher_forcing:
                decoder_input = target[:, t].unsqueeze(1)
            else:
                decoder_input = output.max(1)[1].unsqueeze(1)
        return outputs
class AttnSeq2Seq(nn.Module):
    """The Sequence to Sequence model with an attention-based decoder.

    Identical unrolling scheme to BaseSeq2Seq, except the decoder also
    receives the full encoder output sequence and returns attention
    weights.
    """

    def __init__(self, encoder, decoder, device):
        super().__init__()
        self.device = device
        self.encoder = encoder.to(device)
        self.decoder = decoder.to(device)
        # The decoder consumes the encoder's hidden state directly, so the
        # two modules must agree on hidden size and depth.
        assert self.encoder.encoder_hidden_size == self.decoder.decoder_hidden_size, \
            "Hidden dimensions of encoder and decoder must be equal!"
        assert self.encoder.n_layers == self.decoder.n_layers, \
            "Encoder and decoder must have equal number of layers!"

    def forward(self, source, target, out_seq_len=None, teacher_forcing=True, start_idx=None):
        """The forward pass of the Seq2Seq model.

        Args:
            source (tensor): sequences in source language of shape
                (batch_size, seq_len, input_size)
            target (tensor): (batch_size, seq_len) token indices; column 0
                must hold the <sos> token.
            out_seq_len (int): the maximum length of the output sequence.
                If None, the length is determined by the input sequences.
            teacher_forcing (bool): feed ground-truth tokens to the decoder
                when True, otherwise feed back its own argmax predictions.
            start_idx: fill value for the output buffer (defaults to 0).

        Returns:
            tensor of shape (batch_size, seq_len, decoder.output_size).
        """
        batch_size = source.shape[0]
        # BUG FIX: seq_len was previously assigned only when out_seq_len was
        # None, so passing an explicit out_seq_len raised a NameError.
        seq_len = source.shape[1] if out_seq_len is None else out_seq_len
        if start_idx is None:
            start_idx = 0
        outputs = torch.full((batch_size, seq_len, self.decoder.output_size),
                             start_idx, dtype=torch.float).to(self.device)
        encoder_outputs, hidden = self.encoder(source)
        # First input to the decoder is the <sos> token.
        decoder_input = target[:, 0].unsqueeze(1)
        for t in range(1, seq_len):
            output, hidden, attn = self.decoder(decoder_input, hidden, encoder_outputs)
            outputs[:, t, :] = output
            if teacher_forcing:
                decoder_input = target[:, t].unsqueeze(1)
            else:
                decoder_input = output.max(1)[1].unsqueeze(1)
        return outputs
| 36.872727
| 138
| 0.605523
| 517
| 4,056
| 4.597679
| 0.162476
| 0.040387
| 0.022718
| 0.037863
| 0.93395
| 0.93395
| 0.93395
| 0.93395
| 0.93395
| 0.93395
| 0
| 0.01115
| 0.292406
| 4,056
| 109
| 139
| 37.211009
| 0.817073
| 0.251479
| 0
| 0.862069
| 0
| 0
| 0.074227
| 0
| 0
| 0
| 0
| 0
| 0.068966
| 1
| 0.068966
| false
| 0
| 0.068966
| 0
| 0.206897
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
358a5d02775541f8be8951352bd6bbea60ce714e
| 121
|
py
|
Python
|
ktrain/text/ner/__init__.py
|
Niekvdplas/ktrain
|
808a212a9b8ebddd4e2d75eaca2e54a7ea990b4e
|
[
"Apache-2.0"
] | null | null | null |
ktrain/text/ner/__init__.py
|
Niekvdplas/ktrain
|
808a212a9b8ebddd4e2d75eaca2e54a7ea990b4e
|
[
"Apache-2.0"
] | null | null | null |
ktrain/text/ner/__init__.py
|
Niekvdplas/ktrain
|
808a212a9b8ebddd4e2d75eaca2e54a7ea990b4e
|
[
"Apache-2.0"
] | null | null | null |
from .data import entities_from_conll2003, entities_from_gmb
from .models import print_sequence_taggers, sequence_tagger
| 40.333333
| 60
| 0.884298
| 17
| 121
| 5.882353
| 0.647059
| 0.24
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.036036
| 0.082645
| 121
| 2
| 61
| 60.5
| 0.864865
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
35bf17164497091c3ba52580d494ce85ce0fef06
| 3,486
|
py
|
Python
|
crossword/tests/test_grid_undo_redo.py
|
philhanna/crossword
|
db05c8301bba8b1b5f31e059d2bba2c734b0d0c0
|
[
"MIT"
] | 1
|
2020-06-30T06:22:31.000Z
|
2020-06-30T06:22:31.000Z
|
crossword/tests/test_grid_undo_redo.py
|
philhanna/crossword
|
db05c8301bba8b1b5f31e059d2bba2c734b0d0c0
|
[
"MIT"
] | 145
|
2020-06-02T17:33:18.000Z
|
2020-08-25T03:25:40.000Z
|
crossword/tests/test_grid_undo_redo.py
|
philhanna/crossword
|
db05c8301bba8b1b5f31e059d2bba2c734b0d0c0
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
from crossword import Grid
class TestGridUndoRedo(TestCase):
    """Exercises the undo/redo stacks of Grid.

    Note: Grid cells are symmetric, so toggling (1, 2) also toggles the
    rotationally symmetric cell (5, 4) on a 5x5 grid.
    """

    def _check_cells(self, grid, expected, *cells):
        # Assert that every (row, col) in cells has the given black status.
        for row, col in cells:
            self.assertEqual(expected, grid.is_black_cell(row, col))

    def _check_stacks(self, grid, undo, redo):
        # Assert the exact contents of both history stacks.
        self.assertEqual(undo, grid.undo_stack)
        self.assertEqual(redo, grid.redo_stack)

    def test_undo_with_empty_stack(self):
        grid = Grid(5)
        grid.undo()
        self._check_stacks(grid, [], [])

    def test_redo_with_empty_stack(self):
        grid = Grid(5)
        grid.redo()
        self._check_stacks(grid, [], [])

    def test_add__remove_black_cell(self):
        grid = Grid(5)
        grid.add_black_cell(1, 2)
        self._check_cells(grid, True, (1, 2), (5, 4))
        self._check_stacks(grid, [(1, 2)], [])
        grid.remove_black_cell(1, 2)
        self._check_cells(grid, False, (1, 2), (5, 4))
        self._check_stacks(grid, [(1, 2), (1, 2)], [])

    def test_add_undo(self):
        grid = Grid(5)
        grid.add_black_cell(1, 2)
        self._check_cells(grid, True, (1, 2), (5, 4))
        self._check_stacks(grid, [(1, 2)], [])
        grid.undo()
        self._check_cells(grid, False, (1, 2), (5, 4))
        self._check_stacks(grid, [], [(1, 2)])

    def test_add__add_undo_redo(self):
        grid = Grid(5)
        grid.add_black_cell(1, 2)
        grid.add_black_cell(3, 4)
        self._check_cells(grid, True, (1, 2), (5, 4), (3, 4), (3, 2))
        self._check_stacks(grid, [(1, 2), (3, 4)], [])
        grid.undo()
        self._check_cells(grid, True, (1, 2), (5, 4))
        self._check_cells(grid, False, (3, 4), (3, 2))
        self._check_stacks(grid, [(1, 2)], [(3, 4)])
        grid.redo()
        self._check_cells(grid, True, (1, 2), (5, 4), (3, 4), (3, 2))
        self._check_stacks(grid, [(1, 2), (3, 4)], [])
        grid.undo()
        self._check_cells(grid, True, (1, 2), (5, 4))
        self._check_cells(grid, False, (3, 4), (3, 2))
        self._check_stacks(grid, [(1, 2)], [(3, 4)])
        grid.undo()
        self._check_cells(grid, False, (1, 2), (5, 4), (3, 4), (3, 2))
        self._check_stacks(grid, [], [(3, 4), (1, 2)])
| 38.733333
| 59
| 0.631096
| 497
| 3,486
| 4.209256
| 0.062374
| 0.329828
| 0.147228
| 0.200765
| 0.92782
| 0.919694
| 0.90392
| 0.876195
| 0.846558
| 0.846558
| 0
| 0.037394
| 0.225186
| 3,486
| 89
| 60
| 39.168539
| 0.737134
| 0
| 0
| 0.831169
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.649351
| 1
| 0.064935
| false
| 0.025974
| 0.025974
| 0
| 0.103896
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
ea34631c3060db2ad16aa2888d898da8e44a3078
| 3,601
|
py
|
Python
|
jangl_webleads_inbound/verticals/health_insurance.py
|
jangl-platform/jangl-webleads-inbound
|
7ba7734c0614c946f52af23829b7c61ba4fa9460
|
[
"MIT"
] | 1
|
2021-01-14T21:55:27.000Z
|
2021-01-14T21:55:27.000Z
|
jangl_webleads_inbound/verticals/health_insurance.py
|
jangl-platform/jangl-webleads-inbound
|
7ba7734c0614c946f52af23829b7c61ba4fa9460
|
[
"MIT"
] | 1
|
2021-06-10T21:22:36.000Z
|
2021-06-10T21:22:36.000Z
|
jangl_webleads_inbound/verticals/health_insurance.py
|
jangl-platform/jangl-webleads-inbound
|
7ba7734c0614c946f52af23829b7c61ba4fa9460
|
[
"MIT"
] | null | null | null |
from rest_framework import serializers
# Reusable keyword-argument bundles for declaring optional serializer fields:
# spread into a field constructor with **, e.g. CharField(**allow_blank).
allow_blank = {'default': '', 'initial': '', 'allow_blank': True}
allow_null = {'default': None, 'initial': None, 'allow_null': True}
empty_list = {'default': [], 'initial': [], 'many': True}
class RelativeSerializer(serializers.Serializer):
    """A household relative (used for the lead's spouse and children).

    All fields are optional: numeric/date fields accept null and text
    fields accept blank via the shared kwarg bundles.
    """
    height = serializers.IntegerField(**allow_null)
    weight = serializers.IntegerField(**allow_null)
    birth_date = serializers.DateField(**allow_null)
    # Single-character gender code.
    gender = serializers.CharField(max_length=1, **allow_blank)
    student = serializers.NullBooleanField(required=False)
    tobacco = serializers.NullBooleanField(required=False)
    medical_condition = serializers.CharField(max_length=200, **allow_blank)
class CurrentPolicySerializer(serializers.Serializer):
    """The lead's existing health-insurance policy, if any."""
    insurance_company = serializers.CharField(max_length=50, **allow_blank)
    expiration_date = serializers.DateField(**allow_null)
    insured_since = serializers.DateField(**allow_null)
class PingDataSerializer(serializers.Serializer):
    """Health-insurance lead data accepted on a ping request.

    Every field is optional; null/blank defaults come from the shared
    kwarg bundles at module level.
    """
    # Applicant physical details.
    height = serializers.IntegerField(max_value=100, **allow_null)
    weight = serializers.IntegerField(**allow_null)
    birth_date = serializers.DateField(**allow_null)
    # Single-character gender code.
    gender = serializers.CharField(max_length=1, **allow_blank)
    student = serializers.NullBooleanField(required=False)
    tobacco = serializers.NullBooleanField(required=False)
    bmi = serializers.IntegerField(**allow_null)
    medical_condition = serializers.CharField(max_length=200, **allow_blank)
    # Employment / household details.
    currently_employed = serializers.NullBooleanField(required=False)
    number_in_household = serializers.IntegerField(**allow_null)
    household_income = serializers.IntegerField(**allow_null)
    # Medical history flags.
    hospitalized = serializers.NullBooleanField(required=False)
    ongoing_medical_treatment = serializers.NullBooleanField(required=False)
    previously_denied = serializers.NullBooleanField(required=False)
    prescriptions = serializers.NullBooleanField(required=False)
    prescription_description = serializers.CharField(max_length=255, **allow_blank)
    qualifying_life_condition = serializers.CharField(max_length=255, **allow_blank)
    # Nested household members and existing coverage.
    spouse = RelativeSerializer(**allow_null)
    children = RelativeSerializer(**empty_list)
    current_policy = CurrentPolicySerializer(**allow_null)
class PostDataSerializer(PingDataSerializer):
    """Health-insurance lead data accepted on a post request.

    The field set was previously a byte-for-byte copy of
    PingDataSerializer; DRF serializers inherit declared fields, so
    subclassing keeps the two payloads identical while removing the
    duplication. Add post-only fields here if the payloads ever diverge.
    """
| 51.442857
| 84
| 0.783394
| 357
| 3,601
| 7.666667
| 0.196078
| 0.07563
| 0.204604
| 0.233833
| 0.863719
| 0.833394
| 0.833394
| 0.833394
| 0.833394
| 0.812203
| 0
| 0.010028
| 0.113857
| 3,601
| 69
| 85
| 52.188406
| 0.847697
| 0
| 0
| 0.793103
| 0
| 0
| 0.018606
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.017241
| 0
| 0.948276
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
ea7fa36bd4b3d1d04f92039a39c92c6c8c915a72
| 18,584
|
py
|
Python
|
hovercraft/tests/test_position.py
|
royaso/hovercraft
|
2ca3e8cfd00f5e28077d37bf142e1efd55a63df3
|
[
"MIT"
] | null | null | null |
hovercraft/tests/test_position.py
|
royaso/hovercraft
|
2ca3e8cfd00f5e28077d37bf142e1efd55a63df3
|
[
"MIT"
] | null | null | null |
hovercraft/tests/test_position.py
|
royaso/hovercraft
|
2ca3e8cfd00f5e28077d37bf142e1efd55a63df3
|
[
"MIT"
] | null | null | null |
import os
import unittest
from pkg_resources import resource_string
from lxml import etree
from hovercraft.parse import rst2xml, SlideMaker
from hovercraft.position import gather_positions, calculate_positions, position_slides
TEST_DATA = os.path.join(os.path.split(__file__)[0], 'test_data')
def make_tree(file_name):
    """Load a reStructuredText test fixture and return it as an lxml tree.

    The file is read from the package's test_data directory, converted to
    XML, and run through SlideMaker so step/slide structure is in place.
    """
    rst_source = resource_string(__name__, os.path.join('test_data', file_name))
    xml_text = rst2xml(rst_source)
    document = etree.fromstring(xml_text)
    return SlideMaker(document).walk()
class GatherTests(unittest.TestCase):
    """Tests that position information is correctly parsed"""

    @staticmethod
    def _pos(x, y, z='r0', rx='r0', ry='r0', rz='r0', scale='r0',
             is_path=False, path=None):
        # Build one expected raw-position record; the 'path' key is only
        # present on the step that actually declares a path.
        record = {'data-x': x, 'data-y': y, 'data-z': z,
                  'data-rotate-x': rx, 'data-rotate-y': ry,
                  'data-rotate-z': rz, 'data-scale': scale,
                  'is_path': is_path}
        if path is not None:
            record['path'] = path
        return record

    def test_gathering(self):
        tree = make_tree('positioning.rst')
        positions = list(gather_positions(tree))
        expected = [
            self._pos('r0', 'r0', scale='1'),
            self._pos('r1600', 'r0'),
            self._pos('r1600', 'r0', is_path=True,
                      path='m 100 100 l 200 0 l 0 200'),
            self._pos('r1600', 'r0', is_path=True),
            self._pos('r1600', 'r0', is_path=True),
            self._pos('0', '0'),
            self._pos('r0', 'r0', rz='90'),
            self._pos('r0', 'r0'),
            self._pos('r0', 'r0', is_path=True,
                      path='m 100 100 l 200 0 l 0 200'),
            self._pos('r0', 'r0', is_path=True),
            self._pos('r0', 'r0', z='1000', rx='180', is_path=True),
            self._pos('3000', '1000'),
        ]
        self.assertEqual(positions, expected)
class CalculateTests(unittest.TestCase):
    """Tests that positions are correctly calculated"""

    @staticmethod
    def _pos(x, y, z=0, rx=0, ry=0, rz=0, scale=1):
        # Build one expected resolved-position record.
        return {'data-x': x, 'data-y': y, 'data-z': z,
                'data-rotate-x': rx, 'data-rotate-y': ry,
                'data-rotate-z': rz, 'data-scale': scale}

    def test_square(self):
        # Slides, positioned in a square
        positions = [
            {'data-x': '0', 'data-y': '0'},
            {'data-x': 'r1200', 'data-y': '0'},
            {'data-x': 'r1200', 'data-y': '0'},
            {'data-x': 'r1200', 'data-y': '0'},
            {'data-x': 'r0', 'data-y': 'r-1000'},
            {'data-x': 'r0', 'data-y': 'r-1000'},
            {'data-x': 'r0', 'data-y': 'r-1000'},
            {'data-x': 'r-1200', 'data-y': 'r0'},
            {'data-x': 'r-1200', 'data-y': 'r0'},
            {'data-x': 'r-1200', 'data-y': 'r0'},
            {'data-x': 'r0', 'data-y': 'r1000'},
            {'data-x': 'r0', 'data-y': 'r1000'},
        ]
        result = list(calculate_positions(positions))
        corners = [(0, 0), (1200, 0), (2400, 0), (3600, 0),
                   (3600, -1000), (3600, -2000), (3600, -3000),
                   (2400, -3000), (1200, -3000), (0, -3000),
                   (0, -2000), (0, -1000)]
        self.assertEqual(result, [self._pos(x, y) for x, y in corners])

    def test_relative_positioning(self):
        # Relative positioning is probably the most useful positioning.
        # It allows you to insert or remove a slide, and everything adjusts.
        positions = [
            # The first two slides are just default positons
            {'data-x': 'r0', 'data-y': 'r0'},
            {'data-x': 'r1600', 'data-y': 'r0'},
            # Then suddenly we move vertically!
            {'data-x': 'r0', 'data-y': 'r1000'},
            # Continue the same way one slide.
            {'data-x': 'r0', 'data-y': 'r1000'},
            # Stand still
            {'data-x': 'r0', 'data-y': 'r0'},
            # Stand still again!
            {'data-x': 'r0', 'data-y': 'r0'},
            # Move a little bit
            {'data-x': 'r-40', 'data-y': 'r-200'},
            # Go back to normal movement to the right
            {'data-x': 'r1600', 'data-y': 'r0'},
            {'data-x': 'r1600', 'data-y': 'r0'},
            {'data-x': 'r1600', 'data-y': 'r0'},
            # Absolute movement back to start!
            {'data-x': '0', 'data-y': '0'},
            # Absolute movement to a center for end (with zoomout for example)
            {'data-x': '3000', 'data-y': '1000'},
        ]
        result = list(calculate_positions(positions))
        waypoints = [(0, 0), (1600, 0), (1600, 1000), (1600, 2000),
                     (1600, 2000), (1600, 2000), (1560, 1800),
                     (3160, 1800), (4760, 1800), (6360, 1800),
                     (0, 0), (3000, 1000)]
        self.assertEqual(result, [self._pos(x, y) for x, y in waypoints])

    def test_absolute_path(self):
        # Position slides along a path
        positions = [
            {'data-x': 'r0', 'data-y': 'r0',
             'path': 'M 100 100 L 300 100 L 300 300', 'is_path': True},
            {'is_path': True},
            {'is_path': True},
            {'is_path': True},
            {'is_path': True},
        ]
        result = list(calculate_positions(positions))
        expected = [
            self._pos(0, 0),
            self._pos(2000, 0),
            self._pos(4000, 0, rz=44.99999999999999),
            self._pos(4000, 2000, rz=90.0),
            self._pos(4000, 4000, rz=90.0),
        ]
        self.assertEqual(result, expected)

    def test_relative_path(self):
        positions = [
            {'data-x': 'r0', 'data-y': 'r0'},
            {'data-x': 'r1600', 'data-y': 'r0'},
            {'data-x': 'r1600', 'data-y': 'r0', 'is_path': True,
             'path': 'm 100 100 l 200 0 l 0 200', },
            {'data-x': 'r0', 'data-y': 'r0', 'is_path': True},
            {'data-x': 'r0', 'data-y': 'r0', 'is_path': True},
            {'data-x': 'r1600', 'data-y': 'r0'},
            {'data-x': 'r0', 'data-y': 'r2400'},
        ]
        result = list(calculate_positions(positions))
        expected = [
            self._pos(0, 0),
            self._pos(1600, 0),
            self._pos(3200, 0),
            # This point is exactly on a 90 degree angle. Therefore,
            # it's angle is calculated as 45 degrees, it being the
            # average.
            self._pos(5600, 0, rz=44.99999999999999),
            self._pos(5600, 2400, rz=90.0),
            self._pos(7200, 2400, rz=90.0),
            self._pos(7200, 4800, rz=90.0),
        ]
        self.assertEqual(result, expected)

    def test_complex_path(self):
        positions = [
            {'data-x': 'r0', 'data-y': 'r0'},
            {'data-x': 'r1600', 'data-y': 'r0'},
            {'data-x': 'r1600', 'data-y': 'r0', 'path': 'm 100 100 l 200 0 l 0 200',
             'is_path': True},
            {'is_path': True},
            {'is_path': True},
            # Note that we don't change the rotation, so it stays at 90, here.
            {'data-x': '0', 'data-y': '0'},
            # No new x and y, previous was absolute: Stay still!
            {},
            {'data-x': 'r0', 'data-y': 'r0', 'path': 'm 100 100 l 200 0 l 0 200', 'is_path': True},
            {'is_path': True},
            {'is_path': True},
            {'data-x': '3000', 'data-y': '1000', 'data-rotate-z': '0'},
        ]
        result = list(calculate_positions(positions))
        expected = [
            self._pos(0, 0),
            self._pos(1600, 0),
            self._pos(3200, 0),
            self._pos(5600, 0, rz=44.99999999999999),
            self._pos(5600, 2400, rz=90.0),
            # Note that we don't change the rotation, so it stays at 90, here.
            self._pos(0, 0, rz=90.0),
            # No settings, still same place and rotation.
            self._pos(0, 0, rz=90.0),
            # We start a path, but x and y are r0, so no movement.
            # However, the rotation will come from the path, so it resets to 0.
            self._pos(0, 0),
            self._pos(2400, 0, rz=44.99999999999999),
            self._pos(2400, 2400, rz=90.0),
            self._pos(3000, 1000),
        ]
        self.assertEqual(result, expected)
class PositionTest(unittest.TestCase):
    """End-to-end check of slide positioning against the positioning.rst fixture."""

    def test_complete(self):
        tree = make_tree('positioning.rst')
        # Run the positioner over the whole document tree.
        position_slides(tree)
        # Gather every data-* attribute from each positioned step, in order.
        positions = [
            {name: value
             for name, value in step.attrib.items()
             if name.startswith('data-')}
            for step in tree.findall('step')
        ]
        self.assertEqual(positions, [
            {'data-x': '0', 'data-y': '0', 'data-z': '0',
             'data-rotate-x': '0', 'data-rotate-y': '0',
             'data-rotate-z': '0', 'data-scale': '1'},
            {'data-x': '1600', 'data-y': '0', 'data-z': '0',
             'data-rotate-x': '0', 'data-rotate-y': '0',
             'data-rotate-z': '0', 'data-scale': '1'},
            # Because of the path, we now get an explicit rotation:
            {'data-x': '3200', 'data-y': '0', 'data-z': '0',
             'data-rotate-x': '0', 'data-rotate-y': '0',
             'data-rotate-z': '0', 'data-scale': '1'},
            {'data-x': '5600', 'data-y': '0', 'data-z': '0',
             'data-rotate-x': '0', 'data-rotate-y': '0',
             'data-rotate-z': '44.99999999999999', 'data-scale': '1'},
            {'data-x': '5600', 'data-y': '2400', 'data-z': '0',
             'data-rotate-x': '0', 'data-rotate-y': '0',
             'data-rotate-z': '90.0', 'data-scale': '1'},
            # Rotation carries over from last part of path.
            {'data-x': '0', 'data-y': '0', 'data-z': '0',
             'data-rotate-x': '0', 'data-rotate-y': '0',
             'data-rotate-z': '90.0', 'data-scale': '1'},
            # No position change
            {'data-x': '0', 'data-y': '0', 'data-z': '0',
             'data-rotate-x': '0', 'data-rotate-y': '0',
             'data-rotate-z': '90', 'data-scale': '1'},
            # No change at all.
            {'data-x': '0', 'data-y': '0', 'data-z': '0',
             'data-rotate-x': '0', 'data-rotate-y': '0',
             'data-rotate-z': '90', 'data-scale': '1'},
            # Path starts, rotation comes from path:
            {'data-x': '0', 'data-y': '0', 'data-z': '0',
             'data-rotate-x': '0', 'data-rotate-y': '0',
             'data-rotate-z': '0', 'data-scale': '1'},
            {'data-x': '2400', 'data-y': '0', 'data-z': '0',
             'data-rotate-x': '0', 'data-rotate-y': '0',
             'data-rotate-z': '44.99999999999999', 'data-scale': '1'},
            # Explicit rotate-x and z, automatic position including rotate-z from path.
            {'data-x': '2400', 'data-y': '2400', 'data-z': '1000',
             'data-rotate-x': '180', 'data-rotate-y': '0',
             'data-rotate-z': '90.0', 'data-scale': '1'},
            # Explicit x and y, all other carry over from last slide.
            {'data-x': '3000', 'data-y': '1000', 'data-z': '1000',
             'data-rotate-x': '180', 'data-rotate-y': '0',
             'data-rotate-z': '90.0', 'data-scale': '1'},
        ])
# Allow running this test module directly (e.g. `python thisfile.py`).
if __name__ == '__main__':
    unittest.main()
| 46.693467
| 99
| 0.455822
| 2,567
| 18,584
| 3.271134
| 0.082587
| 0.167917
| 0.226629
| 0.084316
| 0.781589
| 0.779802
| 0.76051
| 0.754674
| 0.743956
| 0.742408
| 0
| 0.08777
| 0.312742
| 18,584
| 397
| 100
| 46.811083
| 0.569684
| 0.078024
| 0
| 0.75
| 0
| 0
| 0.364993
| 0
| 0
| 0
| 0
| 0
| 0.021341
| 1
| 0.02439
| false
| 0
| 0.018293
| 0
| 0.054878
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
576265c72506f0b10ca6c1c578362d47e884e43d
| 1,418
|
py
|
Python
|
rest/service/restapi.py
|
estuaryoss/estuary-agent
|
b8b4264a616be21c86458da75cf29d13a8fb263d
|
[
"Apache-2.0"
] | null | null | null |
rest/service/restapi.py
|
estuaryoss/estuary-agent
|
b8b4264a616be21c86458da75cf29d13a8fb263d
|
[
"Apache-2.0"
] | null | null | null |
rest/service/restapi.py
|
estuaryoss/estuary-agent
|
b8b4264a616be21c86458da75cf29d13a8fb263d
|
[
"Apache-2.0"
] | null | null | null |
import requests
class RestApi:
    """REST API service usually used for self calls.

    The connection dict is expected to provide 'protocol', 'ip', 'port',
    'endpoint', optionally 'timeout' (seconds) and 'cert' (TLS verify value
    passed straight to requests).
    """

    # Fallback request timeout in seconds when the connection dict has no
    # (or a falsy) 'timeout' entry — matches the original behavior.
    DEFAULT_TIMEOUT = 5

    def __init__(self, connection):
        """Store the connection details and resolve the request timeout."""
        self.conn = connection
        # A falsy timeout (missing, None, 0) falls back to the default.
        self.__timeout = self.conn.get('timeout') or self.DEFAULT_TIMEOUT

    def __build_url(self):
        """Assemble the target URL from the connection details.

        Extracted to one place — the original repeated this f-string in every
        HTTP method.
        """
        return f"{self.conn.get('protocol')}://{self.conn.get('ip')}:{self.conn.get('port')}{self.conn.get('endpoint')}"

    def post(self, data, headers):
        """Send a POST with the given body and headers; return the response."""
        return requests.post(self.__build_url(), headers=headers, data=data,
                             timeout=self.__timeout, verify=self.conn.get('cert'))

    def put(self, data, headers):
        """Send a PUT with the given body and headers; return the response."""
        return requests.put(self.__build_url(), headers=headers, data=data,
                            timeout=self.__timeout, verify=self.conn.get('cert'))

    def delete(self, headers):
        """Send a DELETE with the given headers; return the response."""
        return requests.delete(self.__build_url(), headers=headers,
                               timeout=self.__timeout, verify=self.conn.get('cert'))

    def get(self, headers):
        """Send a GET with the given headers; return the response."""
        return requests.get(self.__build_url(), headers=headers,
                            timeout=self.__timeout, verify=self.conn.get('cert'))
| 50.642857
| 126
| 0.635402
| 191
| 1,418
| 4.602094
| 0.193717
| 0.209329
| 0.275313
| 0.077361
| 0.779295
| 0.779295
| 0.779295
| 0.779295
| 0.779295
| 0.775882
| 0
| 0.000855
| 0.174894
| 1,418
| 27
| 127
| 52.518519
| 0.750427
| 0.03103
| 0
| 0.315789
| 0
| 0.210526
| 0.320644
| 0.298682
| 0
| 0
| 0
| 0
| 0
| 1
| 0.263158
| false
| 0
| 0.052632
| 0
| 0.578947
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 10
|
aa2e9a7bae656c1aadea77715a5fbfdfe3c7a26f
| 18,797
|
py
|
Python
|
components/plots.py
|
koukyo1994/streamlit-audio
|
f9dd8e86b332e4425d964b59642e0b5eaf8e0c0a
|
[
"MIT"
] | 5
|
2020-09-18T05:35:52.000Z
|
2022-02-28T02:29:15.000Z
|
components/plots.py
|
koukyo1994/streamlit-audio
|
f9dd8e86b332e4425d964b59642e0b5eaf8e0c0a
|
[
"MIT"
] | null | null | null |
components/plots.py
|
koukyo1994/streamlit-audio
|
f9dd8e86b332e4425d964b59642e0b5eaf8e0c0a
|
[
"MIT"
] | 1
|
2021-01-15T01:49:35.000Z
|
2021-01-15T01:49:35.000Z
|
import librosa
import librosa.display as display
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import numpy as np
import pandas as pd
import streamlit as st
def waveplot(y: np.ndarray, sr: int, processed=None, tp: pd.DataFrame=None, fp: pd.DataFrame=None):
    """Render an interactive waveform plot for the selected time window.

    Optionally overlays a processed signal in red and shades true-positive
    (green) / false-positive (red) event spans from the given frames.
    """
    if not st.checkbox("Waveplot"):
        return
    st.sidebar.markdown("#### Waveplot settings")
    clip_seconds = len(y) // sr
    sec_from = st.sidebar.number_input(
        "start second",
        min_value=0,
        max_value=clip_seconds,
        value=0,
        step=1,
        key="waveplot_start")
    sec_to = st.sidebar.number_input(
        "end second",
        min_value=0,
        max_value=clip_seconds,
        value=clip_seconds,
        step=1,
        key="waveplot_end")
    idx_from = sec_from * sr
    # When the user selects the full clip, include the trailing partial second.
    idx_to = len(y) if sec_to == clip_seconds else sec_to * sr

    fig = plt.figure(figsize=(12, 4))
    plt.grid(True)
    display.waveplot(y[idx_from:idx_to], sr=sr, alpha=0.5)
    if processed is not None:
        display.waveplot(
            processed[idx_from:idx_to],
            sr=sr,
            alpha=0.5,
            color="red")
    # Shade annotated spans: true positives in green first, then false
    # positives in red — same order as the original separate loops.
    for frame, shade in ((tp, "g"), (fp, "r")):
        if frame is not None and len(frame) > 0:
            for _, row in frame.iterrows():
                plt.axvspan(row["t_min"], row["t_max"], color=shade, alpha=0.5, label=str(row["species_id"]))
    plt.legend()
    st.pyplot(fig)
def waveplot_with_annotation(y: np.ndarray,
                             sr: int,
                             annotation: pd.DataFrame,
                             filename: str,
                             processed=None):
    """Render a waveform plot with per-species event spans from `annotation`.

    Events are matched by `filename` (mp3 names are normalized to .wav) and
    filtered to the selected time window; each species gets a stable color.
    """
    plot_wave = st.checkbox("Waveplot")
    if filename.endswith(".mp3"):
        filename = filename.replace(".mp3", ".wav")
    # BUG FIX: the query string previously contained no placeholder, so it
    # compared against a literal and never matched this clip's rows.
    # Interpolate the actual (normalized) filename.
    events = annotation.query(f"filename == '{filename}'")
    colors = [
        "#bf6565", "#ac7ceb", "#e3e176", "#f081e1", "#e8cb6b", "#25b4db",
        "#fa787e", "#a9f274", "#1d7335", "#797fb3"
    ]
    if plot_wave:
        st.sidebar.markdown("#### Waveplot settings")
        start_second = st.sidebar.number_input(
            "start second",
            min_value=0,
            max_value=len(y) // sr,
            value=0,
            step=1,
            key="waveplot_start")
        end_second = st.sidebar.number_input(
            "end second",
            min_value=0,
            max_value=len(y) // sr,
            value=len(y) // sr,
            step=1,
            key="waveplot_end")
        start_index = start_second * sr
        if end_second == len(y) // sr:
            end_index = len(y)
            # Use the exact (fractional) duration for the event filter below.
            end_second = len(y) / sr
        else:
            end_index = end_second * sr
        events_in_period = events.query(
            f"onset >= {start_second} & offset <= {end_second}")
        uniq_labels = events_in_period["ebird_code"].unique().tolist()
        fig = plt.figure(figsize=(12, 4))
        plt.grid(True)
        display.waveplot(y[start_index:end_index], sr=sr, alpha=0.5)
        used_color = []  # type: ignore
        for i, event in events_in_period.iterrows():
            onset = event.onset
            offset = event.offset
            color = colors[uniq_labels.index(event.ebird_code)]
            if color not in used_color:
                label = event.ebird_code
                used_color.append(color)
            else:
                # Leading underscore hides repeated species from the legend.
                label = "_" + event.ebird_code
            plt.axvspan(onset, offset, facecolor=color, alpha=0.5, label=label)
        plt.legend()
        if processed is not None:
            display.waveplot(
                processed[start_index:end_index],
                sr=sr,
                alpha=0.5,
                color="red")
        st.pyplot(fig)
@st.cache
def melspectrogram(y: np.ndarray, params: dict, log=True):
    """Compute a mel spectrogram, optionally converted to dB; cached by streamlit."""
    mel_power = librosa.feature.melspectrogram(y=y, **params)
    return librosa.power_to_db(mel_power) if log else mel_power
@st.cache
def spectrogram(y: np.ndarray, params: dict, log=True):
    """Compute an STFT spectrogram, optionally converted to dB; cached by streamlit.

    NOTE(review): librosa.stft returns a complex matrix while power_to_db is
    documented for power (magnitude-squared) input — confirm whether
    np.abs(...)**2 was intended upstream before changing behavior.
    """
    stft_matrix = librosa.stft(y, **params)
    return librosa.power_to_db(stft_matrix) if log else stft_matrix
def specshow_with_annotation(y: np.ndarray,
                             sr: int,
                             annotation: pd.DataFrame,
                             filename: str,
                             y_processed=None):
    """Render a (mel)spectrogram with per-species event spans from `annotation`.

    When `y_processed` is given, the raw and processed spectrograms are shown
    stacked; otherwise a single plot with a colorbar is drawn. Events are
    matched by `filename` (mp3 names normalized to .wav) and filtered to the
    selected window.
    """
    plot_spectrogram = st.checkbox("Spectrogram plot")
    if filename.endswith(".mp3"):
        filename = filename.replace(".mp3", ".wav")
    # BUG FIX: the query string previously contained no placeholder, so it
    # compared against a literal and never matched this clip's rows.
    # Interpolate the actual (normalized) filename.
    events = annotation.query(f"filename == '{filename}'")
    colors = [
        "#bf6565", "#ac7ceb", "#e3e176", "#f081e1", "#e8cb6b", "#25b4db",
        "#fa787e", "#a9f274", "#1d7335", "#797fb3"
    ]
    if plot_spectrogram:
        st.sidebar.markdown("#### Spectrogram plot settings")
        start_second = st.sidebar.number_input(
            "start second",
            min_value=0,
            max_value=len(y) // sr,
            value=0,
            step=1,
            key="specshow_start")
        end_second = st.sidebar.number_input(
            "end second",
            min_value=0,
            max_value=len(y) // sr,
            value=len(y) // sr,
            step=1,
            key="specshow_end")
        start_index = start_second * sr
        if end_second == len(y) // sr:
            end_index = len(y)
        else:
            end_index = end_second * sr
        y_plot = y[start_index:end_index]
        if y_processed is not None:
            y_plot_processed = y_processed[start_index:end_index]
        events_in_period = events.query(
            f"onset >= {start_second} & offset <= {end_second}")
        uniq_labels = events_in_period["ebird_code"].unique().tolist()
        st.sidebar.markdown("##### (Mel)spectrogram parameters")
        mel = st.sidebar.checkbox("Mel scale", value=True)
        n_fft = st.sidebar.number_input(
            "n_fft", min_value=64, max_value=8192, value=1024, step=64)
        hop_length = st.sidebar.number_input(
            "hop_length", min_value=1, max_value=2048, value=320, step=10)
        if mel:
            n_mels = st.sidebar.number_input(
                "n_mels", min_value=1, max_value=512, value=64, step=16)
            fmin = st.sidebar.number_input(
                "fmin", min_value=1, max_value=8192, value=20, step=100)
            fmax = st.sidebar.number_input(
                "fmax", min_value=4000, max_value=44100, value=14000, step=100)
        log = st.sidebar.checkbox("apply log", value=True)
        if mel:
            melspec_params = {
                "n_fft": n_fft,
                "hop_length": hop_length,
                "n_mels": n_mels,
                "fmin": fmin,
                "fmax": fmax,
                "sr": sr
            }
        else:
            spec_params = {
                "n_fft": n_fft,
                "hop_length": hop_length
            }
        if st.button("Show melspectrogram"):
            with st.spinner("Calculating melspectrogram"):
                if mel:
                    spec = melspectrogram(y_plot, melspec_params, log)
                else:
                    spec = spectrogram(y_plot, spec_params, log)
                if y_processed is not None:
                    if mel:
                        spec_processed = melspectrogram(y_plot_processed,
                                                        melspec_params, log)
                    else:
                        spec_processed = spectrogram(y_plot_processed,
                                                     spec_params, log)
            height, width = spec.shape
            st.write(f"{height} x {width} matrix")
            if y_processed is not None:
                # Two stacked subplots: raw signal on top, processed below.
                with st.spinner("Plotting"):
                    fig = plt.figure(figsize=(12, 8))
                    ax1 = fig.add_subplot(2, 1, 1)
                    if mel:
                        display.specshow(
                            spec,
                            sr=sr,
                            hop_length=hop_length,
                            x_axis="time",
                            y_axis="mel",
                            fmin=fmin,
                            fmax=fmax,
                            ax=ax1)
                    else:
                        display.specshow(
                            spec,
                            sr=sr,
                            hop_length=hop_length,
                            x_axis="time",
                            y_axis="linear",
                            ax=ax1)
                    used_color = []  # type: ignore
                    for i, event in events_in_period.iterrows():
                        onset = event.onset
                        offset = event.offset
                        color = colors[uniq_labels.index(event.ebird_code)]
                        if color not in used_color:
                            label = event.ebird_code
                            used_color.append(color)
                        else:
                            # Leading underscore hides repeats from the legend.
                            label = "_" + event.ebird_code
                        ax1.axvspan(
                            onset,
                            offset,
                            facecolor=color,
                            alpha=0.5,
                            label=label)
                    ax1.legend()
                    ax2 = fig.add_subplot(2, 1, 2)
                    if mel:
                        display.specshow(
                            spec_processed,
                            sr=sr,
                            hop_length=hop_length,
                            x_axis="time",
                            y_axis="mel",
                            fmin=fmin,
                            fmax=fmax,
                            ax=ax2)
                    else:
                        display.specshow(
                            spec_processed,
                            sr=sr,
                            hop_length=hop_length,
                            x_axis="time",
                            y_axis="linear",
                            ax=ax2)
            else:
                # Single plot with a colorbar and event shading.
                with st.spinner("Plotting"):
                    fig = plt.figure(figsize=(12, 4))
                    if mel:
                        display.specshow(
                            spec,
                            sr=sr,
                            hop_length=hop_length,
                            x_axis="time",
                            y_axis="mel",
                            fmin=fmin,
                            fmax=fmax)
                        plt.colorbar()
                    else:
                        display.specshow(
                            spec,
                            sr=sr,
                            hop_length=hop_length,
                            x_axis="time",
                            y_axis="linear")
                        plt.colorbar()
                    used_color = []  # type: ignore
                    for i, event in events_in_period.iterrows():
                        onset = event.onset
                        offset = event.offset
                        color = colors[uniq_labels.index(event.ebird_code)]
                        if color not in used_color:
                            label = event.ebird_code
                            used_color.append(color)
                        else:
                            label = "_" + event.ebird_code
                        plt.axvspan(
                            onset,
                            offset,
                            facecolor=color,
                            alpha=0.5,
                            label=label)
                    plt.legend()
            st.pyplot(fig)
def specshow(y: np.ndarray, sr: int, y_processed=None, tp: pd.DataFrame=None, fp: pd.DataFrame=None):
    """Render a (mel)spectrogram for the selected window.

    When `y_processed` is given, raw and processed spectrograms are shown as
    two stacked subplots; otherwise a single plot with a colorbar is drawn and
    tp/fp annotation rectangles (time x frequency) are overlaid on it.
    """
    plot_spectrogram = st.checkbox("Spectrogram plot")
    if plot_spectrogram:
        st.sidebar.markdown("#### Spectrogram plot settings")
        start_second = st.sidebar.number_input(
            "start second",
            min_value=0,
            max_value=len(y) // sr,
            value=0,
            step=1,
            key="specshow_start")
        end_second = st.sidebar.number_input(
            "end second",
            min_value=0,
            max_value=len(y) // sr,
            value=len(y) // sr,
            step=1,
            key="specshow_end")
        start_index = start_second * sr
        # Selecting the full clip keeps the trailing partial second.
        if end_second == len(y) // sr:
            end_index = len(y)
        else:
            end_index = end_second * sr
        y_plot = y[start_index:end_index]
        if y_processed is not None:
            y_plot_processed = y_processed[start_index:end_index]
        st.sidebar.markdown("##### (Mel)spectrogram parameters")
        mel = st.sidebar.checkbox("Mel scale", value=True)
        n_fft = st.sidebar.number_input(
            "n_fft", min_value=64, max_value=8192, value=1024, step=64)
        hop_length = st.sidebar.number_input(
            "hop_length", min_value=1, max_value=2048, value=320, step=10)
        if mel:
            n_mels = st.sidebar.number_input(
                "n_mels", min_value=1, max_value=512, value=64, step=16)
            fmin = st.sidebar.number_input(
                "fmin", min_value=1, max_value=8192, value=20, step=100)
            fmax = st.sidebar.number_input(
                "fmax", min_value=4000, max_value=44100, value=14000, step=100)
        log = st.sidebar.checkbox("apply log", value=True)
        if mel:
            melspec_params = {
                "n_fft": n_fft,
                "hop_length": hop_length,
                "n_mels": n_mels,
                "fmin": fmin,
                "fmax": fmax,
                "sr": sr
            }
        else:
            spec_params = {
                "n_fft": n_fft,
                "hop_length": hop_length
            }
        if st.button("Show melspectrogram"):
            with st.spinner("Calculating melspectrogram"):
                if mel:
                    spec = melspectrogram(y_plot, melspec_params, log)
                else:
                    spec = spectrogram(y_plot, spec_params, log)
                if y_processed is not None:
                    if mel:
                        spec_processed = melspectrogram(y_plot_processed,
                                                        melspec_params, log)
                    else:
                        spec_processed = spectrogram(y_plot_processed,
                                                     spec_params, log)
            height, width = spec.shape
            st.write(f"{height} x {width} matrix")
            if y_processed is not None:
                # Two stacked subplots: raw signal on top, processed below.
                with st.spinner("Plotting"):
                    fig = plt.figure(figsize=(12, 8))
                    ax1 = fig.add_subplot(2, 1, 1)
                    if mel:
                        display.specshow(
                            spec,
                            sr=sr,
                            hop_length=hop_length,
                            x_axis="time",
                            y_axis="mel",
                            fmin=fmin,
                            fmax=fmax,
                            ax=ax1)
                    else:
                        display.specshow(
                            spec,
                            sr=sr,
                            hop_length=hop_length,
                            x_axis="time",
                            y_axis="linear",
                            ax=ax1)
                    ax2 = fig.add_subplot(2, 1, 2)
                    if mel:
                        display.specshow(
                            spec_processed,
                            sr=sr,
                            hop_length=hop_length,
                            x_axis="time",
                            y_axis="mel",
                            fmin=fmin,
                            fmax=fmax,
                            ax=ax2)
                    else:
                        display.specshow(
                            spec_processed,
                            sr=sr,
                            hop_length=hop_length,
                            x_axis="time",
                            y_axis="linear",
                            ax=ax2)
            else:
                with st.spinner("Plotting"):
                    fig = plt.figure(figsize=(12, 4))
                    ax = plt.axes()
                    if mel:
                        display.specshow(
                            spec,
                            sr=sr,
                            hop_length=hop_length,
                            x_axis="time",
                            y_axis="mel",
                            fmin=fmin,
                            fmax=fmax)
                        plt.colorbar()
                    else:
                        display.specshow(
                            spec,
                            sr=sr,
                            hop_length=hop_length,
                            x_axis="time",
                            y_axis="linear")
                        plt.colorbar()
                    # NOTE(review): tp/fp rectangles are drawn only in this
                    # single-plot branch, where `ax` exists — confirm that
                    # skipping them in the two-subplot branch is intentional.
                    if tp is not None and len(tp) > 0:
                        for _, row in tp.iterrows():
                            rect = patches.Rectangle(
                                (row["t_min"], row["f_min"]),
                                row["t_max"] - row["t_min"],
                                row["f_max"] - row["f_min"],
                                linewidth=1,
                                edgecolor="g",
                                facecolor="g",
                                alpha=0.5,
                                label="tp")
                            ax.add_patch(rect)
                    if fp is not None and len(fp) > 0:
                        for _, row in fp.iterrows():
                            rect = patches.Rectangle(
                                (row["t_min"], row["f_min"]),
                                row["t_max"] - row["t_min"],
                                row["f_max"] - row["f_min"],
                                linewidth=1,
                                edgecolor="r",
                                facecolor="r",
                                alpha=0.5,
                                label="fp")
                            ax.add_patch(rect)
            st.pyplot(fig)
| 37.973737
| 107
| 0.419854
| 1,809
| 18,797
| 4.180763
| 0.098397
| 0.04284
| 0.0357
| 0.0476
| 0.933756
| 0.924104
| 0.916171
| 0.902155
| 0.884437
| 0.884437
| 0
| 0.028651
| 0.483801
| 18,797
| 494
| 108
| 38.050607
| 0.750799
| 0.002022
| 0
| 0.910088
| 0
| 0
| 0.070221
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013158
| false
| 0
| 0.015351
| 0
| 0.032895
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
104e4c1bac71e2a3cfbdadaf2cd745ca07fae398
| 1,800
|
py
|
Python
|
traversal_tests.py
|
Sorrop/py-graph-algorithms
|
e688fba3dd8aa44bcd3a608625fdf71649b83920
|
[
"MIT"
] | 5
|
2017-01-31T11:09:55.000Z
|
2022-03-16T15:38:54.000Z
|
traversal_tests.py
|
Sorrop/py-graph-algorithms
|
e688fba3dd8aa44bcd3a608625fdf71649b83920
|
[
"MIT"
] | null | null | null |
traversal_tests.py
|
Sorrop/py-graph-algorithms
|
e688fba3dd8aa44bcd3a608625fdf71649b83920
|
[
"MIT"
] | 6
|
2020-09-09T23:58:57.000Z
|
2021-10-16T18:49:02.000Z
|
import graph
from depth_first_search import depth_first_search
from breadth_first_search import breadth_first_search
def _edge_pairs(traversal):
    """Yield (u, v) element tuples for each edge in a traversal result."""
    for edge in traversal:
        ends = edge.endPoints()
        yield (ends[0].element(), ends[1].element())


def _run_case(edge_list, start_key, directed):
    """Build a graph from edge_list and run BFS and DFS from start_key."""
    if directed:
        g, _ = graph.create_graph(edge_list, True)
    else:
        g, _ = graph.create_graph(edge_list)
    start = g.get_vertex(start_key)
    bfs = breadth_first_search(g)
    bfs(g, start)
    dfs = depth_first_search(g)
    dfs(g, start)
    return bfs, dfs


def _report(title, edge_list, bfs, dfs):
    """Print both traversal orders for one case, same layout as before."""
    print(title)
    print(edge_list)
    print(' ')
    print('==============================')
    print('Breadth First traversal of G')
    for pair in _edge_pairs(bfs.breadth_traversal):
        print(pair)
    print('==============================')
    print('Depth First traversal of G')
    for pair in _edge_pairs(dfs.depth_traversal):
        print(pair)


undirected_edges = [(0, 1), (0, 2), (0, 3), (1, 4), (1, 5), (2, 6),
                    (2, 7), (3, 8), (3, 9), (4, 10), (4, 11)]
bfs, dfs = _run_case(undirected_edges, 0, directed=False)
_report('Undirected Case.', undirected_edges, bfs, dfs)

# Visual break between the two cases.
print(' ')
print('==============================')
print('==============================')
print(' ')

directed_edges = [('a', 'b'), ('c', 'a'), ('c', 'b'),
                  ('d', 'c'), ('d', 'e'), ('b', 'e')]
bfs, dfs = _run_case(directed_edges, 'a', directed=True)
_report('Directed Case.', directed_edges, bfs, dfs)
| 33.333333
| 74
| 0.535
| 217
| 1,800
| 4.290323
| 0.193548
| 0.09667
| 0.068743
| 0.07304
| 0.807734
| 0.71536
| 0.71536
| 0.71536
| 0.71536
| 0.71536
| 0
| 0.022822
| 0.196667
| 1,800
| 53
| 75
| 33.962264
| 0.621024
| 0
| 0
| 0.711111
| 0
| 0
| 0.255867
| 0.103034
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.066667
| 0
| 0.066667
| 0.488889
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
10a4806044f0d753cfe47dd7983b3682eff504b2
| 97,171
|
py
|
Python
|
functions.py
|
FatinaBasmadji/space-robot-planar-case
|
c1cacba7ef2c2509a282577147cd90421f471360
|
[
"MIT"
] | null | null | null |
functions.py
|
FatinaBasmadji/space-robot-planar-case
|
c1cacba7ef2c2509a282577147cd90421f471360
|
[
"MIT"
] | null | null | null |
functions.py
|
FatinaBasmadji/space-robot-planar-case
|
c1cacba7ef2c2509a282577147cd90421f471360
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8
# In[ ]:
import numpy as np
from math import cos, sin
# In[ ]:
# Space robot parameters (planar free-floating base + 3-link manipulator).
# Lengths in meters, masses in kg, inertias in kg*m^2.
# NOTE(review): pi is truncated to 5 decimals; math.pi would be more precise —
# confirm downstream numerical results tolerate the change before switching.
pi = 3.14159
p1 = 0.3696 # manipulator mounting point in x axis
p2 = 0.001279 # manipulator mounting point in y axis
a1 = 0.1805 # center of mass of the first link
a2 = 0.2000 # center of mass of the second link
a3 = 0.1029 # center of mass of the third link
L1 = 0.4488 # the length of the first link
L2 = 0.4499 # the length of the second link
L3 = 0.3545 # the length of the third link
m0 = 64.859 # the mass of the satellite
m1 = 2.9130 # the mass of the first kinematic pair
m2 = 2.6460 # the mass of the second kinematic pair
m3 = 1.6990 # the mass of the third kinematic pair
I0 = 2.6952 # Satellite moment of inertia
I1 = 0.091391 # Moment of inertia of the first kinematic pair
I2 = 0.081375 # Moment of inertia of the second kinematic pair
I3 = 0.021904 # Moment of inertia of the third kinematic pair
d = 0.3536 # Distance from base geometry center to each corner
# In[ ]:
# Hyper parameters: integration step (s) and number of simulation steps.
dt = 0.01
nstep = 500
# In[ ]:
def kinematics(state):
    """Forward kinematics of the 3-link planar manipulator on a free base.

    state: sequence whose first six entries are the base pose [x0, y0, theta0]
    followed by the three joint angles.
    Returns (EE, Jacobian): end-effector pose [x, y, phi] and its 3x6 Jacobian
    with respect to the six pose variables.
    """
    # Cumulative link angles, built left-to-right exactly as the original
    # repeated sums were evaluated (so the floating-point results match).
    th0 = state[2]
    th01 = th0 + state[3]
    th012 = th01 + state[4]
    th0123 = th012 + state[5]
    c0, s0 = cos(th0), sin(th0)
    c01, s01 = cos(th01), sin(th01)
    c012, s012 = cos(th012), sin(th012)
    c0123, s0123 = cos(th0123), sin(th0123)

    EE = np.zeros(3)
    EE[0] = state[0] + p1 * c0 - p2 * s0 + L1 * c01 + L2 * c012 + L3 * c0123
    EE[1] = state[1] + p1 * s0 + p2 * c0 + L1 * s01 + L2 * s012 + L3 * s0123
    EE[2] = th0123

    Jacobian = np.zeros((3, 6))
    # Row 0: d(x)/d(state); columns for the untouched entries stay zero.
    Jacobian[0][0] = 1
    Jacobian[0][2] = - L3 * s0123 - L1 * s01 - p2 * c0 - p1 * s0 - L2 * s012
    Jacobian[0][3] = - L3 * s0123 - L1 * s01 - L2 * s012
    Jacobian[0][4] = - L3 * s0123 - L2 * s012
    Jacobian[0][5] = - L3 * s0123
    # Row 1: d(y)/d(state).
    Jacobian[1][1] = 1
    Jacobian[1][2] = L3 * c0123 + L1 * c01 + p1 * c0 - p2 * s0 + L2 * c012
    Jacobian[1][3] = L3 * c0123 + L1 * c01 + L2 * c012
    Jacobian[1][4] = L3 * c0123 + L2 * c012
    Jacobian[1][5] = L3 * c0123
    # Row 2: orientation depends on theta0 and every joint angle equally.
    Jacobian[2][2:] = 1
    return EE, Jacobian
# In[ ]:
def dynamics(state):
J0v = np.zeros((2,6))
J0w = np.zeros((1,6))
J1v = np.zeros((2,6))
J1w = np.zeros((1,6))
J2v = np.zeros((2,6))
J2w = np.zeros((1,6))
J3v = np.zeros((2,6))
J3w = np.zeros((1,6))
C = np.zeros((6,1))
J0v = [[1, 0, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0]]
J0vt = np.transpose(J0v)
J0w = [[0, 0, 1, 0, 0, 0]]
J0wt = np.transpose(J0w)
var1 = - p1*sin(state[2]) -p2*cos(state[2]) - a1*sin(state[2]+state[3])
var2 = -a1*sin(state[2]+state[3])
var3 = -p2*sin(state[2]) + p1*cos(state[2])+a1*cos(state[3]+state[2])
var4 = a1*cos(state[3]+state[2])
J1v = [[1, 0, var1, var2, 0, 0],[0, 1, var3, var4, 0, 0]];
J1vt = np.transpose(J1v)
J1w = [[0, 0, 1, 1, 0, 0]]
J1wt = np.transpose(J1w)
var5 = -p2*cos(state[2]) - L1*sin(state[3]+state[2]) - p1*sin(state[2]) - a2*sin(state[3]+state[4]+state[2]);
var6 = -L1*sin(state[3]+state[2]) - a2*sin(state[3]+state[4]+state[2]);
var7 = -a2*sin(state[3]+state[4]+state[2]);
var8 = -p2*sin(state[2]) + a2*cos(state[3]+state[4]+state[2]) + L1*cos(state[3]+state[2])+p1*cos(state[2]);
var9 = a2*cos(state[3]+state[4]+state[2]) + L1*cos(state[3]+state[2]);
var10 = a2*cos(state[3]+state[4]+state[2]);
J2v = [[1, 0, var5, var6, var7, 0],[0, 1, var8, var9, var10, 0]]
J2vt = np.transpose(J2v)
J2w = [[0, 0, 1, 1, 1, 0]]
J2wt = np.transpose(J2w)
var11 = -p2*cos(state[2]) - L1*sin(state[3]+state[2]) - p1*sin(state[2]) - L2*sin(state[3]+state[4]+state[2]) -a3*sin(state[2]+state[3]+state[4]+state[5]);
var12 = -L1*sin(state[3]+state[2]) - L2*sin(state[3]+state[4]+state[2]) - a3*sin(state[3]+state[4]+state[2]+state[5]);
var13 = -L2*sin(state[3]+state[4]+state[2]) - a3*sin(state[3]+state[4]+state[2]+state[5]);
var14 = -a3*sin(state[3]+state[4]+state[2]+state[5]);
var15 = -p2*sin(state[2]) + L2*cos(state[3]+state[4]+state[2]) + L1*cos(state[3]+state[2])+p1*cos(state[2]) + a3*cos(state[2]+state[3]+state[4]+state[5]);
var16 = L2*cos(state[3]+state[4]+state[2]) + L1*cos(state[3]+state[2]) + a3*cos(state[2]+state[3]+state[4]+state[5]);
var17 = L2*cos(state[3]+state[4]+state[2]) + a3*cos(state[2]+state[3]+state[4]+state[5]);
var18 = a3*cos(state[2]+state[3]+state[4]+state[5]);
J3v = [[1, 0, var11, var12, var13, var14],[0, 1, var15, var16, var17, var18]]
J3vt = np.transpose(J3v)
J3w = [[0, 0, 1, 1, 1, 1]]
J3wt = np.transpose(J3w)
Tv = np.matmul(m0*J0vt,J0v) + np.matmul(m1*J1vt,J1v) + np.matmul(m2*J2vt,J2v) + np.matmul(m3*J3vt,J3v)
Tw = J0wt*I0*J0w + J1wt*I1*J1w + J2wt*I2*J2w + J3wt*I3*J3w;
M = Tv+Tw;
C[0][0] = - state[10]*(state[8]*(m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) + a2*m2*cos(state[2] + state[3] + state[4])) + state[9]*(m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) + a2*m2*cos(state[2] + state[3] + state[4])) + state[10]*(m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) + a2*m2*cos(state[2] + state[3] + state[4])) + a3*m3*state[11]*cos(state[2] + state[3] + state[4] + state[5])) - state[8]*(state[8]*(m2*(L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + a2*cos(state[2] + state[3] + state[4])) + m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) + m1*(a1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]))) + state[10]*(m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) + a2*m2*cos(state[2] + state[3] + state[4])) + state[9]*(m2*(L1*cos(state[2] + state[3]) + a2*cos(state[2] + state[3] + state[4])) + m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) + a1*m1*cos(state[2] + state[3])) + a3*m3*state[11]*cos(state[2] + state[3] + state[4] + state[5])) - state[9]*(state[10]*(m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) + a2*m2*cos(state[2] + state[3] + state[4])) + state[8]*(m2*(L1*cos(state[2] + state[3]) + a2*cos(state[2] + state[3] + state[4])) + m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) + a1*m1*cos(state[2] + state[3])) + state[9]*(m2*(L1*cos(state[2] + state[3]) + a2*cos(state[2] + state[3] + state[4])) + m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + 
state[3] + state[4])) + a1*m1*cos(state[2] + state[3])) + a3*m3*state[11]*cos(state[2] + state[3] + state[4] + state[5])) - state[11]*(a3*m3*state[8]*cos(state[2] + state[3] + state[4] + state[5]) + a3*m3*state[9]*cos(state[2] + state[3] + state[4] + state[5]) + a3*m3*state[10]*cos(state[2] + state[3] + state[4] + state[5]) + a3*m3*state[11]*cos(state[2] + state[3] + state[4] + state[5]));
C[1][0] = - state[11]*(a3*m3*state[8]*sin(state[2] + state[3] + state[4] + state[5]) + a3*m3*state[9]*sin(state[2] + state[3] + state[4] + state[5]) + a3*m3*state[10]*sin(state[2] + state[3] + state[4] + state[5]) + a3*m3*state[11]*sin(state[2] + state[3] + state[4] + state[5])) - state[8]*(state[9]*(m2*(L1*sin(state[2] + state[3]) + a2*sin(state[2] + state[3] + state[4])) + m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])) + a1*m1*sin(state[2] + state[3])) + state[10]*(m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4])) + a2*m2*sin(state[2] + state[3] + state[4])) + state[8]*(m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4])) + m2*(L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + a2*sin(state[2] + state[3] + state[4])) + m1*(a1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]))) + a3*m3*state[11]*sin(state[2] + state[3] + state[4] + state[5])) - state[9]*(state[8]*(m2*(L1*sin(state[2] + state[3]) + a2*sin(state[2] + state[3] + state[4])) + m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])) + a1*m1*sin(state[2] + state[3])) + state[9]*(m2*(L1*sin(state[2] + state[3]) + a2*sin(state[2] + state[3] + state[4])) + m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])) + a1*m1*sin(state[2] + state[3])) + state[10]*(m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4])) + a2*m2*sin(state[2] + state[3] + state[4])) + a3*m3*state[11]*sin(state[2] + state[3] + state[4] + state[5])) - state[10]*(state[8]*(m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4])) + a2*m2*sin(state[2] + state[3] 
+ state[4])) + state[9]*(m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4])) + a2*m2*sin(state[2] + state[3] + state[4])) + state[10]*(m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4])) + a2*m2*sin(state[2] + state[3] + state[4])) + a3*m3*state[11]*sin(state[2] + state[3] + state[4] + state[5]));
C[2][0] = (state[9]*(state[6]*(m2*(L1*cos(state[2] + state[3]) + a2*cos(state[2] + state[3] + state[4])) + m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) + a1*m1*cos(state[2] + state[3])) + state[7]*(m2*(L1*sin(state[2] + state[3]) + a2*sin(state[2] + state[3] + state[4])) + m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])) + a1*m1*sin(state[2] + state[3]))))/2 - state[8]*(state[11]*(a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) + m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4]))) + state[10]*(m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4]))*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) + m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + 
sin(state[2] + state[3] + state[4])*L2)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) - m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2)*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4])) + a2*m2*sin(state[2] + state[3] + state[4])*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - a2*m2*cos(state[2] + state[3] + state[4])*(cos(state[2])*p2 + sin(state[2] + state[3] + state[4])*a2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) + m2*sin(state[2] + state[3] + state[4])*a2*(L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + a2*cos(state[2] + state[3] + state[4])) - m2*cos(state[2] + state[3] + state[4])*a2*(L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + a2*sin(state[2] + state[3] + state[4]))) - state[9]*(m2*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2] + state[3])*L1)*(L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + a2*sin(state[2] + state[3] + state[4])) - m2*(sin(state[2] + state[3] + state[4])*a2 + sin(state[2] + state[3])*L1)*(L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + a2*cos(state[2] + state[3] + state[4])) + m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1)*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4])) - m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + 
L2*cos(state[2] + state[3] + state[4])) + m2*(L1*cos(state[2] + state[3]) + a2*cos(state[2] + state[3] + state[4]))*(cos(state[2])*p2 + sin(state[2] + state[3] + state[4])*a2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) - m2*(L1*sin(state[2] + state[3]) + a2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) + m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4]))*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) - m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) + m1*cos(state[2] + state[3])*a1*(a1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2])) - m1*sin(state[2] + state[3])*a1*(a1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2])) + a1*m1*cos(state[2] + state[3])*(cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*a1) - a1*m1*sin(state[2] + state[3])*(cos(state[2])*p1 - sin(state[2])*p2 + cos(state[2] + state[3])*a1))) - state[6]*(state[9]*(m2*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2] + state[3])*L1) + m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1) + m1*cos(state[2] + state[3])*a1) + state[10]*(m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) + m2*cos(state[2] + state[3] + state[4])*a2) + state[8]*(m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) + 
m1*(cos(state[2])*p1 - sin(state[2])*p2 + cos(state[2] + state[3])*a1) + m2*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2)) + m3*state[11]*cos(state[2] + state[3] + state[4] + state[5])*a3) - state[7]*(state[8]*(m2*(cos(state[2])*p2 + sin(state[2] + state[3] + state[4])*a2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) + m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) + m1*(cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*a1)) + state[9]*(m2*(sin(state[2] + state[3] + state[4])*a2 + sin(state[2] + state[3])*L1) + m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1) + m1*sin(state[2] + state[3])*a1) + state[10]*(m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2) + m2*sin(state[2] + state[3] + state[4])*a2) + m3*state[11]*sin(state[2] + state[3] + state[4] + state[5])*a3) + (state[11]*(a3*m3*cos(state[2] + state[3] + state[4] + state[5])*state[6] + a3*m3*sin(state[2] + state[3] + state[4] + state[5])*state[7]))/2 - state[11]*(state[11]*(a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*sin(state[2] + state[3] + state[4] + state[5])*a3 + a3*m3*sin(state[2] + state[3] + state[4] + state[5])*cos(state[2] + state[3] + state[4] + state[5])*a3) + state[10]*(a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + 
state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) - a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) + a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)) + state[9]*(a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) + a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1))) - state[9]*(state[11]*(a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) + m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + 
state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4]))) - state[9]*(m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1)*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])) - m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) + m2*(L1*cos(state[2] + state[3]) + a2*cos(state[2] + state[3] + state[4]))*(cos(state[2])*p2 + sin(state[2] + state[3] + state[4])*a2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) - m2*(L1*sin(state[2] + state[3]) + a2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) + m2*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2] + state[3])*L1)*(L1*sin(state[2] + state[3]) + a2*sin(state[2] + state[3] + state[4])) - m2*(sin(state[2] + state[3] + state[4])*a2 + sin(state[2] + state[3])*L1)*(L1*cos(state[2] + state[3]) + a2*cos(state[2] + state[3] + state[4])) + m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4]))*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) - m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - 
sin(state[2])*p2) + a1*m1*cos(state[2] + state[3])*(cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*a1) - a1*m1*sin(state[2] + state[3])*(cos(state[2])*p1 - sin(state[2])*p2 + cos(state[2] + state[3])*a1) + a1*m1*cos(state[2] + state[3])*sin(state[2] + state[3])*a1 - a1*m1*sin(state[2] + state[3])*cos(state[2] + state[3])*a1) + state[10]*(m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) + m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2)*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])) - m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4]))*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) + m2*sin(state[2] + state[3] + state[4])*a2*(L1*cos(state[2] + state[3]) + a2*cos(state[2] + state[3] + state[4])) + a2*m2*sin(state[2] + state[3] + state[4])*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - m2*cos(state[2] + state[3] + state[4])*a2*(L1*sin(state[2] + state[3]) + a2*sin(state[2] + state[3] + state[4])) - a2*m2*cos(state[2] + state[3] + state[4])*(cos(state[2])*p2 + sin(state[2] + state[3] + state[4])*a2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1))) + (state[10]*(state[6]*(m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) + a2*m2*cos(state[2] + 
state[3] + state[4])) + state[7]*(m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4])) + a2*m2*sin(state[2] + state[3] + state[4]))))/2 - state[10]*(state[9]*(m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4]))*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) - m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1) + m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4]))*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1) + a2*m2*sin(state[2] + state[3] + state[4])*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - a2*m2*cos(state[2] + state[3] + state[4])*(cos(state[2])*p2 + sin(state[2] + state[3] + state[4])*a2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) - a2*m2*sin(state[2] + state[3] + state[4])*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2] + state[3])*L1) + a2*m2*cos(state[2] + state[3] + state[4])*(sin(state[2] + state[3] + state[4])*a2 + sin(state[2] + state[3])*L1)) + state[10]*(m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) - m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + 
state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) + m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4]))*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) + a2*m2*sin(state[2] + state[3] + state[4])*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - a2*m2*cos(state[2] + state[3] + state[4])*(cos(state[2])*p2 + sin(state[2] + state[3] + state[4])*a2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) + a2*m2*cos(state[2] + state[3] + state[4])*sin(state[2] + state[3] + state[4])*a2 - a2*m2*sin(state[2] + state[3] + state[4])*cos(state[2] + state[3] + state[4])*a2) + state[11]*(a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) + m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4])))) + (state[8]*(state[6]*(m2*(L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + a2*cos(state[2] + state[3] + state[4])) + m3*(a3*cos(state[2] + 
state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) + m1*(a1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]))) + state[7]*(m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4])) + m2*(L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + a2*sin(state[2] + state[3] + state[4])) + m1*(a1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2])))))/2 + (state[6]*(state[10]*(m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) + m2*cos(state[2] + state[3] + state[4])*a2) + state[8]*(m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) + m1*(cos(state[2])*p1 - sin(state[2])*p2 + cos(state[2] + state[3])*a1) + m2*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2)) + state[9]*(m2*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2] + state[3])*L1) + m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1) + m1*cos(state[2] + state[3])*a1) + m3*cos(state[2] + state[3] + state[4] + state[5])*a3*state[11]))/2 + (state[7]*(state[9]*(m2*(sin(state[2] + state[3] + state[4])*a2 + sin(state[2] + state[3])*L1) + m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1) + m1*sin(state[2] + state[3])*a1) + state[10]*(m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2) + m2*sin(state[2] + state[3] + state[4])*a2) + state[8]*(m2*(cos(state[2])*p2 + sin(state[2] + state[3] + state[4])*a2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) + m3*(sin(state[2] + state[3] + 
state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) + m1*(cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*a1)) + m3*sin(state[2] + state[3] + state[4] + state[5])*a3*state[11]))/2;
C[3][0] = (state[10]*(state[6]*(m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) + a2*m2*cos(state[2] + state[3] + state[4])) + state[7]*(m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4])) + a2*m2*sin(state[2] + state[3] + state[4])) + state[8]*(m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4]))*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) - m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1) + m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4]))*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1) + a2*m2*sin(state[2] + state[3] + state[4])*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - a2*m2*cos(state[2] + state[3] + state[4])*(cos(state[2])*p2 + sin(state[2] + state[3] + state[4])*a2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) - a2*m2*sin(state[2] + state[3] + state[4])*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2] + state[3])*L1) + a2*m2*cos(state[2] + state[3] + state[4])*(sin(state[2] + state[3] + state[4])*a2 + sin(state[2] + state[3])*L1))))/2 - state[11]*(state[10]*(a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + 
state[4])*L2 + cos(state[2] + state[3])*L1) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1) - a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) + a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)) + state[11]*(a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*sin(state[2] + state[3] + state[4] + state[5])*a3 + a3*m3*sin(state[2] + state[3] + state[4] + state[5])*cos(state[2] + state[3] + state[4] + state[5])*a3)) - state[10]*(state[10]*(m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) - m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) + m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1) - m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4]))*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1) + a2*m2*sin(state[2] + state[3] + 
state[4])*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2] + state[3])*L1) - a2*m2*cos(state[2] + state[3] + state[4])*(sin(state[2] + state[3] + state[4])*a2 + sin(state[2] + state[3])*L1) + a2*m2*cos(state[2] + state[3] + state[4])*sin(state[2] + state[3] + state[4])*a2 - a2*m2*sin(state[2] + state[3] + state[4])*cos(state[2] + state[3] + state[4])*a2) + state[11]*(m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) + a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4])) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1))) - (state[8]*(state[11]*(m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])) - m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) + m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4]))) - state[6]*(m2*(L1*cos(state[2] + state[3]) + a2*cos(state[2] + state[3] + state[4])) + m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] 
+ state[3]) + L2*cos(state[2] + state[3] + state[4])) + a1*m1*cos(state[2] + state[3])) + state[8]*(m2*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2] + state[3])*L1)*(L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + a2*sin(state[2] + state[3] + state[4])) - m2*(sin(state[2] + state[3] + state[4])*a2 + sin(state[2] + state[3])*L1)*(L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + a2*cos(state[2] + state[3] + state[4])) + m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1)*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4])) - m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) + m2*(L1*cos(state[2] + state[3]) + a2*cos(state[2] + state[3] + state[4]))*(cos(state[2])*p2 + sin(state[2] + state[3] + state[4])*a2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) - m2*(L1*sin(state[2] + state[3]) + a2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) + m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4]))*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) - m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) + 
m1*cos(state[2] + state[3])*a1*(a1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2])) - m1*sin(state[2] + state[3])*a1*(a1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2])) + a1*m1*cos(state[2] + state[3])*(cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*a1) - a1*m1*sin(state[2] + state[3])*(cos(state[2])*p1 - sin(state[2])*p2 + cos(state[2] + state[3])*a1)) + state[9]*(m2*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2] + state[3])*L1)*(L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + a2*sin(state[2] + state[3] + state[4])) - m2*(sin(state[2] + state[3] + state[4])*a2 + sin(state[2] + state[3])*L1)*(L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + a2*cos(state[2] + state[3] + state[4])) - m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1)*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])) + m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) + m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1)*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4])) - m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) - m2*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2] + state[3])*L1)*(L1*sin(state[2] + state[3]) + a2*sin(state[2] + state[3] + 
state[4])) + m2*(sin(state[2] + state[3] + state[4])*a2 + sin(state[2] + state[3])*L1)*(L1*cos(state[2] + state[3]) + a2*cos(state[2] + state[3] + state[4])) + m1*cos(state[2] + state[3])*a1*(a1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2])) - m1*sin(state[2] + state[3])*a1*(a1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2])) - a1*m1*cos(state[2] + state[3])*sin(state[2] + state[3])*a1 + a1*m1*sin(state[2] + state[3])*cos(state[2] + state[3])*a1) - state[7]*(m2*(L1*sin(state[2] + state[3]) + a2*sin(state[2] + state[3] + state[4])) + m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])) + a1*m1*sin(state[2] + state[3])) + state[10]*(m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) - m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2)*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])) - m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) + m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2)*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4])) + m2*sin(state[2] + state[3] + state[4])*a2*(L1*cos(state[2] + state[3]) + a2*cos(state[2] + state[3] + state[4])) - m2*cos(state[2] + state[3] + state[4])*a2*(L1*sin(state[2] + state[3]) + a2*sin(state[2] + state[3] + state[4])) - m2*sin(state[2] + state[3] + state[4])*a2*(L1*cos(state[2] + 
state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + a2*cos(state[2] + state[3] + state[4])) + m2*cos(state[2] + state[3] + state[4])*a2*(L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + a2*sin(state[2] + state[3] + state[4])))))/2 + (state[6]*(state[10]*(m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) + m2*cos(state[2] + state[3] + state[4])*a2) + state[8]*(m2*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2] + state[3])*L1) + m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1) + m1*cos(state[2] + state[3])*a1) + state[9]*(m2*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2] + state[3])*L1) + m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1) + m1*cos(state[2] + state[3])*a1) + m3*cos(state[2] + state[3] + state[4] + state[5])*a3*state[11]))/2 + (state[7]*(state[8]*(m2*(sin(state[2] + state[3] + state[4])*a2 + sin(state[2] + state[3])*L1) + m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1) + m1*sin(state[2] + state[3])*a1) + state[9]*(m2*(sin(state[2] + state[3] + state[4])*a2 + sin(state[2] + state[3])*L1) + m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1) + m1*sin(state[2] + state[3])*a1) + state[10]*(m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2) + m2*sin(state[2] + state[3] + state[4])*a2) + m3*sin(state[2] + state[3] + state[4] + state[5])*a3*state[11]))/2 + (state[11]*(state[8]*(a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - a3*m3*sin(state[2] + state[3] + state[4] + 
state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) + a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1)) + a3*m3*cos(state[2] + state[3] + state[4] + state[5])*state[6] + a3*m3*sin(state[2] + state[3] + state[4] + state[5])*state[7]))/2 - state[9]*(state[11]*(a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1) + m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4]))) + state[10]*(m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) - m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2)*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])) + m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + 
cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1) - m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4]))*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1) + m2*sin(state[2] + state[3] + state[4])*a2*(L1*cos(state[2] + state[3]) + a2*cos(state[2] + state[3] + state[4])) - m2*cos(state[2] + state[3] + state[4])*a2*(L1*sin(state[2] + state[3]) + a2*sin(state[2] + state[3] + state[4])) + a2*m2*sin(state[2] + state[3] + state[4])*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2] + state[3])*L1) - a2*m2*cos(state[2] + state[3] + state[4])*(sin(state[2] + state[3] + state[4])*a2 + sin(state[2] + state[3])*L1))) - state[6]*(state[8]*(m2*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2] + state[3])*L1) + m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1) + m1*cos(state[2] + state[3])*a1) + state[9]*(m2*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2] + state[3])*L1) + m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1) + m1*cos(state[2] + state[3])*a1) + state[10]*(m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) + m2*cos(state[2] + state[3] + state[4])*a2) + m3*state[11]*cos(state[2] + state[3] + state[4] + state[5])*a3) - state[7]*(state[8]*(m2*(sin(state[2] + state[3] + state[4])*a2 + sin(state[2] + state[3])*L1) + m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1) + m1*sin(state[2] + state[3])*a1) + state[9]*(m2*(sin(state[2] + state[3] + state[4])*a2 + sin(state[2] + state[3])*L1) + m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1) + m1*sin(state[2] + state[3])*a1) + 
state[10]*(m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2) + m2*sin(state[2] + state[3] + state[4])*a2) + m3*state[11]*sin(state[2] + state[3] + state[4] + state[5])*a3) - state[8]*(state[10]*(m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1) - m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4]))*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1) + m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) - m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2)*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4])) + a2*m2*sin(state[2] + state[3] + state[4])*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2] + state[3])*L1) - a2*m2*cos(state[2] + state[3] + state[4])*(sin(state[2] + state[3] + state[4])*a2 + sin(state[2] + state[3])*L1) + m2*sin(state[2] + state[3] + state[4])*a2*(L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + a2*cos(state[2] + state[3] + state[4])) - m2*cos(state[2] + state[3] + state[4])*a2*(L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + a2*sin(state[2] + state[3] + state[4]))) - state[9]*(m2*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2] + state[3])*L1)*(L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + a2*sin(state[2] + state[3] + state[4])) - m2*(sin(state[2] + state[3] + state[4])*a2 + sin(state[2] + 
state[3])*L1)*(L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + a2*cos(state[2] + state[3] + state[4])) - m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1)*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])) + m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) + m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1)*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4])) - m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) - m2*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2] + state[3])*L1)*(L1*sin(state[2] + state[3]) + a2*sin(state[2] + state[3] + state[4])) + m2*(sin(state[2] + state[3] + state[4])*a2 + sin(state[2] + state[3])*L1)*(L1*cos(state[2] + state[3]) + a2*cos(state[2] + state[3] + state[4])) + m1*cos(state[2] + state[3])*a1*(a1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2])) - m1*sin(state[2] + state[3])*a1*(a1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2])) - a1*m1*cos(state[2] + state[3])*sin(state[2] + state[3])*a1 + a1*m1*sin(state[2] + state[3])*cos(state[2] + state[3])*a1) + state[11]*(a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + 
state[3])*L1) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1) + m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4])))) + (state[9]*(state[6]*(m2*(L1*cos(state[2] + state[3]) + a2*cos(state[2] + state[3] + state[4])) + m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) + a1*m1*cos(state[2] + state[3])) + state[7]*(m2*(L1*sin(state[2] + state[3]) + a2*sin(state[2] + state[3] + state[4])) + m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])) + a1*m1*sin(state[2] + state[3])) - state[8]*(m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1)*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])) - m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) + m2*(L1*cos(state[2] + state[3]) + a2*cos(state[2] + state[3] + state[4]))*(cos(state[2])*p2 + sin(state[2] + state[3] + state[4])*a2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) - m2*(L1*sin(state[2] + state[3]) + a2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - 
sin(state[2])*p2) + m2*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2] + state[3])*L1)*(L1*sin(state[2] + state[3]) + a2*sin(state[2] + state[3] + state[4])) - m2*(sin(state[2] + state[3] + state[4])*a2 + sin(state[2] + state[3])*L1)*(L1*cos(state[2] + state[3]) + a2*cos(state[2] + state[3] + state[4])) + m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4]))*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) - m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) + a1*m1*cos(state[2] + state[3])*(cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*a1) - a1*m1*sin(state[2] + state[3])*(cos(state[2])*p1 - sin(state[2])*p2 + cos(state[2] + state[3])*a1) + a1*m1*cos(state[2] + state[3])*sin(state[2] + state[3])*a1 - a1*m1*sin(state[2] + state[3])*cos(state[2] + state[3])*a1)))/2;
C[4][0] = (state[8]*(state[6]*(m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) + a2*m2*cos(state[2] + state[3] + state[4])) - state[10]*(m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) - m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) - m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) + m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2)*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4])) - m2*sin(state[2] + state[3] + state[4])*a2*(L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + a2*cos(state[2] + state[3] + state[4])) + m2*cos(state[2] + state[3] + state[4])*a2*(L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + a2*sin(state[2] + state[3] + state[4])) + a2*m2*cos(state[2] + state[3] + state[4])*sin(state[2] + state[3] + state[4])*a2 - a2*m2*sin(state[2] + state[3] + state[4])*cos(state[2] + state[3] + state[4])*a2) + state[8]*(m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4]))*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] 
+ state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) + m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) - m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2)*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4])) + a2*m2*sin(state[2] + state[3] + state[4])*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - a2*m2*cos(state[2] + state[3] + state[4])*(cos(state[2])*p2 + sin(state[2] + state[3] + state[4])*a2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) + m2*sin(state[2] + state[3] + state[4])*a2*(L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + a2*cos(state[2] + state[3] + state[4])) - m2*cos(state[2] + state[3] + state[4])*a2*(L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + a2*sin(state[2] + state[3] + state[4]))) + state[7]*(m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4])) + a2*m2*sin(state[2] + state[3] + state[4])) - state[11]*(m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4])) - m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) + m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + 
L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4]))) + state[9]*(m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1) - m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4]))*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1) + m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) - m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2)*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4])) + a2*m2*sin(state[2] + state[3] + state[4])*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2] + state[3])*L1) - a2*m2*cos(state[2] + state[3] + state[4])*(sin(state[2] + state[3] + state[4])*a2 + sin(state[2] + state[3])*L1) + m2*sin(state[2] + state[3] + state[4])*a2*(L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + a2*cos(state[2] + state[3] + state[4])) - m2*cos(state[2] + state[3] + state[4])*a2*(L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + a2*sin(state[2] + state[3] + state[4])))))/2 - state[7]*(state[8]*(m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2) + m2*sin(state[2] + state[3] + state[4])*a2) + state[9]*(m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2) + m2*sin(state[2] + state[3] + state[4])*a2) + state[10]*(m3*(sin(state[2] + state[3] + state[4] + 
state[5])*a3 + sin(state[2] + state[3] + state[4])*L2) + m2*sin(state[2] + state[3] + state[4])*a2) + m3*state[11]*sin(state[2] + state[3] + state[4] + state[5])*a3) + state[9]*(state[10]*(m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) - m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) - m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) + m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2)*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])) - m2*sin(state[2] + state[3] + state[4])*a2*(L1*cos(state[2] + state[3]) + a2*cos(state[2] + state[3] + state[4])) + m2*cos(state[2] + state[3] + state[4])*a2*(L1*sin(state[2] + state[3]) + a2*sin(state[2] + state[3] + state[4])) + a2*m2*cos(state[2] + state[3] + state[4])*sin(state[2] + state[3] + state[4])*a2 - a2*m2*sin(state[2] + state[3] + state[4])*cos(state[2] + state[3] + state[4])*a2) - state[11]*(m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])) + a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + 
state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2))) - state[11]**2*(a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*sin(state[2] + state[3] + state[4] + state[5])*a3 + a3*m3*sin(state[2] + state[3] + state[4] + state[5])*cos(state[2] + state[3] + state[4] + state[5])*a3) + (state[9]*(state[6]*(m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) + a2*m2*cos(state[2] + state[3] + state[4])) + state[7]*(m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4])) + a2*m2*sin(state[2] + state[3] + state[4])) + state[9]*(m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) - m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2)*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])) + m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1) - m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4]))*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1) + m2*sin(state[2] + state[3] + state[4])*a2*(L1*cos(state[2] + state[3]) + a2*cos(state[2] + state[3] + state[4])) - m2*cos(state[2] + state[3] + state[4])*a2*(L1*sin(state[2] + state[3]) 
+ a2*sin(state[2] + state[3] + state[4])) + a2*m2*sin(state[2] + state[3] + state[4])*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2] + state[3])*L1) - a2*m2*cos(state[2] + state[3] + state[4])*(sin(state[2] + state[3] + state[4])*a2 + sin(state[2] + state[3])*L1)) - state[10]*(m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) - m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) - m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) + m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2)*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])) - m2*sin(state[2] + state[3] + state[4])*a2*(L1*cos(state[2] + state[3]) + a2*cos(state[2] + state[3] + state[4])) + m2*cos(state[2] + state[3] + state[4])*a2*(L1*sin(state[2] + state[3]) + a2*sin(state[2] + state[3] + state[4])) + a2*m2*cos(state[2] + state[3] + state[4])*sin(state[2] + state[3] + state[4])*a2 - a2*m2*sin(state[2] + state[3] + state[4])*cos(state[2] + state[3] + state[4])*a2) - state[11]*(m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4])) - m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) + 
m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4]))) + state[8]*(m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) + m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2)*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])) - m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4]))*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) + m2*sin(state[2] + state[3] + state[4])*a2*(L1*cos(state[2] + state[3]) + a2*cos(state[2] + state[3] + state[4])) + a2*m2*sin(state[2] + state[3] + state[4])*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - m2*cos(state[2] + state[3] + state[4])*a2*(L1*sin(state[2] + state[3]) + a2*sin(state[2] + state[3] + state[4])) - a2*m2*cos(state[2] + state[3] + state[4])*(cos(state[2])*p2 + sin(state[2] + state[3] + state[4])*a2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1))))/2 - state[6]*(state[8]*(m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) + m2*cos(state[2] + state[3] + state[4])*a2) + state[9]*(m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) + m2*cos(state[2] + state[3] + 
state[4])*a2) + state[10]*(m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) + m2*cos(state[2] + state[3] + state[4])*a2) + m3*state[11]*cos(state[2] + state[3] + state[4] + state[5])*a3) + (state[7]*(state[8]*(m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2) + m2*sin(state[2] + state[3] + state[4])*a2) + state[9]*(m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2) + m2*sin(state[2] + state[3] + state[4])*a2) + state[10]*(m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2) + m2*sin(state[2] + state[3] + state[4])*a2) + m3*sin(state[2] + state[3] + state[4] + state[5])*a3*state[11]))/2 + state[8]*(state[9]*(m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) - m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2)*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])) - m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) + m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2)*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4])) + m2*sin(state[2] + state[3] + state[4])*a2*(L1*cos(state[2] + state[3]) + a2*cos(state[2] + state[3] + state[4])) - m2*cos(state[2] + state[3] + state[4])*a2*(L1*sin(state[2] + state[3]) + a2*sin(state[2] + state[3] + state[4])) - 
m2*sin(state[2] + state[3] + state[4])*a2*(L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + a2*cos(state[2] + state[3] + state[4])) + m2*cos(state[2] + state[3] + state[4])*a2*(L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + a2*sin(state[2] + state[3] + state[4]))) - state[11]*(a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2) + m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4]))) + state[10]*(m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) - m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) - m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) + m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2)*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4])) - m2*sin(state[2] + state[3] + 
state[4])*a2*(L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + a2*cos(state[2] + state[3] + state[4])) + m2*cos(state[2] + state[3] + state[4])*a2*(L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + a2*sin(state[2] + state[3] + state[4])) + a2*m2*cos(state[2] + state[3] + state[4])*sin(state[2] + state[3] + state[4])*a2 - a2*m2*sin(state[2] + state[3] + state[4])*cos(state[2] + state[3] + state[4])*a2)) + (state[10]*(state[6]*(m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) + a2*m2*cos(state[2] + state[3] + state[4])) + state[7]*(m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4])) + a2*m2*sin(state[2] + state[3] + state[4])) + state[9]*(m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) - m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) + m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1) - m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4]))*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1) + a2*m2*sin(state[2] + state[3] + state[4])*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2] + state[3])*L1) - a2*m2*cos(state[2] + state[3] + state[4])*(sin(state[2] + state[3] + state[4])*a2 + sin(state[2] + state[3])*L1) + a2*m2*cos(state[2] + state[3] + state[4])*sin(state[2] + state[3] + state[4])*a2 - a2*m2*sin(state[2] + state[3] + state[4])*cos(state[2] + state[3] + state[4])*a2) + 
state[8]*(m3*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) - m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) + m3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4]))*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - m3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4]))*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) + a2*m2*sin(state[2] + state[3] + state[4])*(cos(state[2] + state[3] + state[4])*a2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - a2*m2*cos(state[2] + state[3] + state[4])*(cos(state[2])*p2 + sin(state[2] + state[3] + state[4])*a2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) + a2*m2*cos(state[2] + state[3] + state[4])*sin(state[2] + state[3] + state[4])*a2 - a2*m2*sin(state[2] + state[3] + state[4])*cos(state[2] + state[3] + state[4])*a2)))/2 + (state[11]*(state[8]*(a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) - a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) + 
a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)) + state[9]*(a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1) - a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) + a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)) + a3*m3*cos(state[2] + state[3] + state[4] + state[5])*state[6] + a3*m3*sin(state[2] + state[3] + state[4] + state[5])*state[7]))/2 + (state[6]*(state[8]*(m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) + m2*cos(state[2] + state[3] + state[4])*a2) + state[9]*(m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) + m2*cos(state[2] + state[3] + state[4])*a2) + state[10]*(m3*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) + m2*cos(state[2] + state[3] + state[4])*a2) + m3*cos(state[2] + state[3] + state[4] + state[5])*a3*state[11]))/2 - state[10]*state[11]*(m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4])) + a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) - a3*m3*cos(state[2] + state[3] + state[4] + 
state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2));
C[5][0] = (state[10]*(state[8]*(a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) + m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4]))) + state[11]*(m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4])) + a3*m3*cos(state[2] + state[3] + state[4] + state[5])*sin(state[2] + state[3] + state[4] + state[5])*a3 - a3*m3*sin(state[2] + state[3] + state[4] + state[5])*cos(state[2] + state[3] + state[4] + state[5])*a3) + state[10]*(m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4])) + a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)) + state[9]*(m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + 
L2*cos(state[2] + state[3] + state[4])) + a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4])) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1)) + a3*m3*cos(state[2] + state[3] + state[4] + state[5])*state[6] + a3*m3*sin(state[2] + state[3] + state[4] + state[5])*state[7]))/2 - state[9]*(state[11]*(m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])) + a3*m3*cos(state[2] + state[3] + state[4] + state[5])*sin(state[2] + state[3] + state[4] + state[5])*a3 - a3*m3*sin(state[2] + state[3] + state[4] + state[5])*cos(state[2] + state[3] + state[4] + state[5])*a3) - state[10]*(m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4])) - m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) + m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])))) + (state[6]*(m3*cos(state[2] + state[3] + state[4] + 
state[5])*a3*state[8] + m3*cos(state[2] + state[3] + state[4] + state[5])*a3*state[9] + m3*cos(state[2] + state[3] + state[4] + state[5])*a3*state[10] + m3*cos(state[2] + state[3] + state[4] + state[5])*a3*state[11]))/2 + state[8]*(state[10]*(m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4])) - m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) + m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4]))) - state[11]*(m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4])) + a3*m3*cos(state[2] + state[3] + state[4] + state[5])*sin(state[2] + state[3] + state[4] + state[5])*a3 - a3*m3*sin(state[2] + state[3] + state[4] + state[5])*cos(state[2] + state[3] + state[4] + state[5])*a3) + state[9]*(m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])) - 
m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) + m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4])))) + (state[7]*(m3*sin(state[2] + state[3] + state[4] + state[5])*a3*state[8] + m3*sin(state[2] + state[3] + state[4] + state[5])*a3*state[9] + m3*sin(state[2] + state[3] + state[4] + state[5])*a3*state[10] + m3*sin(state[2] + state[3] + state[4] + state[5])*a3*state[11]))/2 - state[6]*(m3*state[8]*cos(state[2] + state[3] + state[4] + state[5])*a3 + m3*state[9]*cos(state[2] + state[3] + state[4] + state[5])*a3 + m3*state[10]*cos(state[2] + state[3] + state[4] + state[5])*a3 + m3*state[11]*cos(state[2] + state[3] + state[4] + state[5])*a3) + (state[8]*(state[8]*(a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) + m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4]))) + state[11]*(m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + 
state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4])) + a3*m3*cos(state[2] + state[3] + state[4] + state[5])*sin(state[2] + state[3] + state[4] + state[5])*a3 - a3*m3*sin(state[2] + state[3] + state[4] + state[5])*cos(state[2] + state[3] + state[4] + state[5])*a3) + state[10]*(a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2) + m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4]))) + state[9]*(a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1) + m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + p1*cos(state[2]) - p2*sin(state[2]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + p2*cos(state[2]) + 
p1*sin(state[2]) + L2*sin(state[2] + state[3] + state[4]))) + a3*m3*cos(state[2] + state[3] + state[4] + state[5])*state[6] + a3*m3*sin(state[2] + state[3] + state[4] + state[5])*state[7]))/2 - state[7]*(m3*state[8]*sin(state[2] + state[3] + state[4] + state[5])*a3 + m3*state[9]*sin(state[2] + state[3] + state[4] + state[5])*a3 + m3*state[10]*sin(state[2] + state[3] + state[4] + state[5])*a3 + m3*state[11]*sin(state[2] + state[3] + state[4] + state[5])*a3) + (state[11]*(state[8]*(a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*sin(state[2] + state[3] + state[4] + state[5])*a3 + a3*m3*sin(state[2] + state[3] + state[4] + state[5])*cos(state[2] + state[3] + state[4] + state[5])*a3) + state[10]*(a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*sin(state[2] + state[3] + state[4] + state[5])*a3 + a3*m3*sin(state[2] + state[3] + state[4] + state[5])*cos(state[2] + state[3] + state[4] + state[5])*a3) + state[9]*(a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + 
state[3])*L1) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*sin(state[2] + state[3] + state[4] + state[5])*a3 + a3*m3*sin(state[2] + state[3] + state[4] + state[5])*cos(state[2] + state[3] + state[4] + state[5])*a3) + a3*m3*cos(state[2] + state[3] + state[4] + state[5])*state[6] + a3*m3*sin(state[2] + state[3] + state[4] + state[5])*state[7]))/2 + (state[9]*(state[8]*(a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2])*p1 + cos(state[2] + state[3])*L1 - sin(state[2])*p2) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + cos(state[2])*p2 + sin(state[2])*p1 + sin(state[2] + state[3])*L1) + m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4]))) + state[11]*(m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])) + a3*m3*cos(state[2] + state[3] + state[4] + state[5])*sin(state[2] + state[3] + state[4] + state[5])*a3 - a3*m3*sin(state[2] + state[3] + state[4] + state[5])*cos(state[2] + state[3] + state[4] + state[5])*a3) + state[10]*(m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + 
state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4])) + a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2)) + state[9]*(a3*m3*sin(state[2] + state[3] + state[4] + state[5])*(cos(state[2] + state[3] + state[4] + state[5])*a3 + cos(state[2] + state[3] + state[4])*L2 + cos(state[2] + state[3])*L1) - a3*m3*cos(state[2] + state[3] + state[4] + state[5])*(sin(state[2] + state[3] + state[4] + state[5])*a3 + sin(state[2] + state[3] + state[4])*L2 + sin(state[2] + state[3])*L1) + m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L1*cos(state[2] + state[3]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L1*sin(state[2] + state[3]) + L2*sin(state[2] + state[3] + state[4]))) + a3*m3*cos(state[2] + state[3] + state[4] + state[5])*state[6] + a3*m3*sin(state[2] + state[3] + state[4] + state[5])*state[7]))/2 - state[10]*state[11]*(m3*sin(state[2] + state[3] + state[4] + state[5])*a3*(a3*cos(state[2] + state[3] + state[4] + state[5]) + L2*cos(state[2] + state[3] + state[4])) - m3*cos(state[2] + state[3] + state[4] + state[5])*a3*(a3*sin(state[2] + state[3] + state[4] + state[5]) + L2*sin(state[2] + state[3] + state[4])) + a3*m3*cos(state[2] + state[3] + state[4] + state[5])*sin(state[2] + state[3] + state[4] + state[5])*a3 - a3*m3*sin(state[2] + state[3] + state[4] + state[5])*cos(state[2] + state[3] + state[4] + state[5])*a3);
return M, C
# In[ ]:
def distance(ini_pos, fin_pos):
    """Return the planar Euclidean distance between two points.

    Only the first two components (x, y) of each point are used, even if
    the sequences carry more entries.
    """
    dx = ini_pos[0] - fin_pos[0]
    dy = ini_pos[1] - fin_pos[1]
    return np.sqrt(dx ** 2 + dy ** 2)
# In[ ]:
def step(state, action, ee_final):
    """Advance the free-floating manipulator one time step.

    Parameters
    ----------
    state : 12-element state vector (6 positions, 6 velocities).
    action : joint-rate command; a tensor-like object exposing ``.numpy()``.
    ee_final : target end-effector position (at least 3 components used).

    Returns
    -------
    (new_x, new_EE, done, reward) — next state (shape (12,)), new
    end-effector pose from ``kinematics``, termination flag, and the
    negative distance-to-target reward.

    NOTE(review): relies on module-level globals not visible here:
    ``H1``, ``H2`` (pre-allocated matrices, mutated in place), ``dt``,
    ``ee_final_x``, ``ee_final_y``, plus ``kinematics``/``dynamics``.
    """
    action = action.numpy()
    x = state
    old_x = x
    # old_EE / old_Jacobian are computed but never used below.
    old_EE, old_Jacobian = kinematics(x)
    q = np.array([[state[0]], [state[1]], [state[2]], [state[3]], [state[4]], [state[5]]])
    M, C = dynamics(x)
    # rsT: 1x2 row built from the base position — presumably a moment-arm
    # term for the spacecraft base; TODO confirm against the derivation.
    rsT = [[-state[1], state[0]]]
    # Partition the inertia matrix M into base (s) and manipulator (m) blocks.
    Ms_11 = M[0:2,0:2]
    Ms_12 = M[0:2,2]
    Ms_12 = Ms_12.reshape((2, 1))
    MsT_12 = np.transpose(Ms_12)
    Ms_22 = M[2,2]
    # Assemble H1 (base-side) and H2 (manipulator coupling) in place.
    H1[0:2,0:2] = Ms_11
    H1[0:2,2:3] = Ms_12
    H1[2:3,0:2] = MsT_12 + np.matmul(rsT,Ms_11)
    H1[2,2] = Ms_22 + np.matmul(rsT,Ms_12)
    H1inv = np.linalg.inv(H1)
    Msm_11 = M[0:2,3:6]
    Msm_21 = M[2:3,3:6]
    H2[0:2,0:3] = Msm_11
    H2[2:3,0:3] = Msm_21 + np.matmul(rsT,Msm_11)
    teta_prim = [[action[0]], [action[1]], [action[2]]]
    # Base velocity induced by the commanded joint rates (momentum coupling).
    sat_vel = np.matmul(np.matmul(-H1inv,H2),teta_prim)
    dq = np.vstack([sat_vel,teta_prim])
    # NOTE(review): this is not a true RK4 step — the derivative dq is never
    # re-evaluated for k2..k4, so the stages only rescale dt*dq. It behaves
    # like a scaled Euler step; verify against the intended integrator.
    k1 = dt*dq
    k2 = dt*(dq + 0.5*k1)
    k3 = dt*(dq + 0.5*k2)
    k4 = dt*(dq + k3)
    q = q + (k1 + 2*k2 + 2*k3 + k4)/6
    new_x = [q[0], q[1], q[2], q[3], q[4], q[5], dq[0], dq[1], dq[2], dq[3], dq[4], dq[5]]
    new_EE, Jacobian = kinematics(new_x)
    # NOTE(review): naming is inverted — ini_pos holds the TARGET (ee_final)
    # and fin_pos the current end-effector position; distance() is symmetric
    # so the reward is still correct.
    ini_pos = [ee_final[0], ee_final[1], ee_final[2]]
    fin_pos = [new_EE[0], new_EE[1], new_EE[2]]
    done = 0
    reward = -distance(ini_pos, fin_pos)
    # NOTE(review): the termination test uses the globals ee_final_x /
    # ee_final_y rather than the ee_final argument — likely a bug if a
    # caller passes a target different from those globals; confirm.
    if abs(ee_final_x-new_EE[0])<0.001 and abs(ee_final_y-new_EE[1])<0.001:
        done = 1
        reward = 0
    new_x = np.array([q[0], q[1], q[2], q[3], q[4], q[5], dq[0], dq[1], dq[2], dq[3], dq[4], dq[5]])
    new_x = new_x.reshape(12,)
    return new_x, new_EE, done, reward
| 506.098958
| 25,146
| 0.562297
| 19,182
| 97,171
| 2.84548
| 0.009749
| 0.251292
| 0.378476
| 0.411784
| 0.962039
| 0.956213
| 0.953519
| 0.953116
| 0.950203
| 0.948958
| 0
| 0.120065
| 0.132416
| 97,171
| 191
| 25,147
| 508.748691
| 0.527377
| 0.007615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027211
| false
| 0
| 0.013605
| 0
| 0.068027
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
52b2893d801640e5dcf82ab0cfc28d541ace2709
| 128
|
py
|
Python
|
scCloud/scCloud/annotate_cluster/__init__.py
|
broadinstitute/scRNA-Seq
|
03aafb92274a97f4d634ac9e42f0e0feca91ed98
|
[
"BSD-3-Clause"
] | 12
|
2019-04-08T11:39:33.000Z
|
2022-02-22T02:50:27.000Z
|
scCloud/scCloud/annotate_cluster/__init__.py
|
broadinstitute/scRNA-Seq
|
03aafb92274a97f4d634ac9e42f0e0feca91ed98
|
[
"BSD-3-Clause"
] | null | null | null |
scCloud/scCloud/annotate_cluster/__init__.py
|
broadinstitute/scRNA-Seq
|
03aafb92274a97f4d634ac9e42f0e0feca91ed98
|
[
"BSD-3-Clause"
] | 3
|
2019-03-06T20:44:33.000Z
|
2020-02-17T13:43:46.000Z
|
from .annotate_cluster import annotate_clusters
from .run_annotate_cluster import run_annotate_cluster, annotate_anndata_object
| 42.666667
| 79
| 0.90625
| 17
| 128
| 6.352941
| 0.470588
| 0.416667
| 0.388889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070313
| 128
| 2
| 80
| 64
| 0.907563
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
52b2f348984654cc44d1955932c22bfab70fad0f
| 20,091
|
py
|
Python
|
menu/functions.py
|
WillRazorFace/InstaMax
|
4ebb5ee5ad88c2e2a2283bcd13e264cc99513627
|
[
"Apache-2.0"
] | null | null | null |
menu/functions.py
|
WillRazorFace/InstaMax
|
4ebb5ee5ad88c2e2a2283bcd13e264cc99513627
|
[
"Apache-2.0"
] | null | null | null |
menu/functions.py
|
WillRazorFace/InstaMax
|
4ebb5ee5ad88c2e2a2283bcd13e264cc99513627
|
[
"Apache-2.0"
] | null | null | null |
from core.instabot import Bot
from os import system, path
from .constants import CLEAR_CONSOLE_COMMAND, OPTIONS_FILE, DRIVER_MENU
from time import sleep
def configure() -> tuple:
    """Interactively collect credentials and webdriver settings.

    Prompts for username, password, driver model and driver path,
    persists them (newline-separated) to OPTIONS_FILE, and returns
    ``(username, password, driver, driver_path)``.

    NOTE(review): the password is read with plain input() (echoed to the
    terminal) and stored in clear text in OPTIONS_FILE — consider getpass
    and a safer storage format.
    """
    driver_options = {'1': 'chrome', '2': 'firefox', '3': 'safari'}
    system(CLEAR_CONSOLE_COMMAND)
    print('Enter your Instagram account username: @', end='')
    username = input()
    print('Enter your Instagram account password: ', end='')
    password = input()
    system(CLEAR_CONSOLE_COMMAND)
    # Loop until a valid driver choice and an existing driver file are given.
    while True:
        print('Select your driver model')
        print(DRIVER_MENU)
        driver = input('>>> ')
        try:
            driver = driver_options[driver]
            system(CLEAR_CONSOLE_COMMAND)
            while True:
                print('Enter the path to your driver (/example/driver/path/driver.exe): ', end='')
                driver_path = input('')
                if path.isfile(driver_path):
                    break
                else:
                    system(CLEAR_CONSOLE_COMMAND)
                    print('Invalid path, file does not exist\n')
                    continue
            break
        except KeyError:
            # Menu key not in driver_options — re-prompt.
            system(CLEAR_CONSOLE_COMMAND)
            print('Invalid option\n')
            continue
    with open(OPTIONS_FILE, 'w') as file:
        file.write(username + '\n' + password + '\n' + driver + '\n' + driver_path)
    return username, password, driver, driver_path
def follow_suggested(bot_instance: Bot) -> int:
    """Interactively follow suggested users via the bot.

    Asks how many users to follow and, optionally, builds an ignore list
    (from a file or entered one by one), then delegates to
    ``bot_instance.follow_suggested``.

    Returns the number of users actually followed.

    Fix: the function was annotated ``-> int`` but fell off the end and
    returned None; it now returns the followed count.
    """
    system(CLEAR_CONSOLE_COMMAND)
    while True:
        print('How many users do you want to follow? (numbers only) ', end='')
        quantity = input()
        try:
            quantity = int(quantity)
            # Optionally collect accounts that must NOT be followed.
            while True:
                ignore = []
                print("Are there any accounts you don't want to follow? (Y/N) ", end='')
                dont_follow = input()
                if dont_follow == 'Y' or dont_follow == 'y':
                    system(CLEAR_CONSOLE_COMMAND)
                    while True:
                        print('[1] - Get accounts from a file (one user per line)\n[2] - Insert accounts one by one\n')
                        accounts_input = input('>>> ')
                        if accounts_input == '1':
                            system(CLEAR_CONSOLE_COMMAND)
                            while True:
                                print('Enter the path to the file (example/path/to/the/file): ', end='')
                                file_path = input()
                                if path.isfile(file_path):
                                    with open(file_path, 'r') as file:
                                        for account in file.readlines():
                                            ignore.append(account.strip())
                                    break
                                else:
                                    system(CLEAR_CONSOLE_COMMAND)
                                    print('Invalid path\n')
                                    continue
                            break
                        elif accounts_input == '2':
                            while True:
                                system(CLEAR_CONSOLE_COMMAND)
                                print(f'{len(ignore)} accounts to not follow\n')
                                print('Enter the account username (type "exit" to stop): ', end='')
                                username = input()
                                if username == 'exit':
                                    break
                                ignore.append(username)
                                continue
                            break
                        else:
                            system(CLEAR_CONSOLE_COMMAND)
                            print('Invalid option\n')
                            continue
                elif dont_follow == 'N' or dont_follow == 'n':
                    break
                else:
                    system(CLEAR_CONSOLE_COMMAND)
                    print('Invalid option\n')
                    continue
                break
            break
        except ValueError:
            # Non-numeric quantity — re-prompt.
            system(CLEAR_CONSOLE_COMMAND)
            print('Invalid quantity\n')
            continue
    system(CLEAR_CONSOLE_COMMAND)
    print('Following')
    followed = bot_instance.follow_suggested(quantity, ignore)
    system(CLEAR_CONSOLE_COMMAND)
    print(f'{followed} users followed. Press anything to return to the menu.', end='')
    input()
    return followed
def like_feed(bot_instance: Bot) -> None:
    """Interactively like (and optionally comment on) feed posts.

    Prompts for a post count and a Y/N comment flag, then delegates to
    ``bot_instance.like_feed_posts`` and reports how many posts were liked.
    """
    system(CLEAR_CONSOLE_COMMAND)
    while True:
        print('How many posts from feed do you want to like? (numbers only) ', end='')
        quantity = input()
        try:
            quantity = int(quantity)
        except ValueError:
            system(CLEAR_CONSOLE_COMMAND)
            print('Invalid quantity\n')
            continue
        print('Do you want to comment on posts that are liked? (Y/N) (This will delay the process in trying to avoid Instagram comment blocking. You can change comments in the "comments.txt" file) ', end='')
        comment = input()
        # Re-bind the raw answer to a boolean consumed by the bot call.
        if comment == 'Y' or comment == 'y':
            comment = True
        elif comment == 'N' or comment == 'n':
            comment = False
        else:
            system(CLEAR_CONSOLE_COMMAND)
            print('Invalid option\n')
            continue
        break
    system(CLEAR_CONSOLE_COMMAND)
    print(f'Liking posts from your feed')
    liked_posts = bot_instance.like_feed_posts(quantity, comment)
    system(CLEAR_CONSOLE_COMMAND)
    print(f'{liked_posts} posts liked. Press anything to return to the menu.', end='')
    input()
def like_posts(bot_instance: Bot) -> None:
    """Interactively like (and optionally comment on) posts by hashtag.

    Prompts for a post count, a hashtag and a Y/N comment flag, then
    delegates to ``bot_instance.like_posts_by_hashtag``.
    """
    system(CLEAR_CONSOLE_COMMAND)
    while True:
        print('How many posts do you want to like? (numbers only) ', end='')
        quantity = input()
        try:
            quantity = int(quantity)
            # The hashtag prompt lives inside the try so an invalid
            # quantity skips it entirely.
            print('Enter the posts hashtag: #', end='')
            hashtag = input()
        except ValueError:
            system(CLEAR_CONSOLE_COMMAND)
            print('Invalid quantity\n')
            continue
        print('Do you want to comment on posts that are liked? (Y/N) (This will delay the process in trying to avoid Instagram comment blocking. You can change comments in the "comments.txt" file) ', end='')
        comment = input()
        # Re-bind the raw answer to a boolean consumed by the bot call.
        if comment == 'Y' or comment == 'y':
            comment = True
        elif comment == 'N' or comment == 'n':
            comment = False
        else:
            system(CLEAR_CONSOLE_COMMAND)
            print('Invalid option\n')
            continue
        break
    system(CLEAR_CONSOLE_COMMAND)
    print(f'Liking posts from #{hashtag}')
    liked_posts = bot_instance.like_posts_by_hashtag(hashtag, quantity, comment)
    system(CLEAR_CONSOLE_COMMAND)
    print(f'{liked_posts} posts liked. Press anything to return to the menu.', end='')
    input()
def get_followers(bot_instance: Bot) -> None:
    """Interactively list an account's followers and optionally save them.

    Prompts for a username and a follower count ("all" fetches every
    follower), prints the results and offers to write them to a file.

    NOTE(review): this function shadows the builtin ``all`` and, later,
    the module-level ``path`` import (with the output file path). Neither
    is used afterwards inside this function, but renaming would be safer.
    """
    system(CLEAR_CONSOLE_COMMAND)
    all = False
    while True:
        print('Enter the instagram account username: @', end='')
        account = input()
        print('How many followers you want to get? (numbers only) (type "all" to get all followers) ', end='')
        quantity = input()
        try:
            quantity = int(quantity)
        except ValueError:
            # Non-numeric input is only accepted as the "all" keyword;
            # quantity then stays a string and the flag drives the fetch.
            if quantity == 'all' or quantity == 'All' or quantity == 'ALL':
                all = True
            else:
                system(CLEAR_CONSOLE_COMMAND)
                print('Invalid quantity\n')
                continue
        break
    system(CLEAR_CONSOLE_COMMAND)
    print(f'Searching for followers of {account}')
    followers = [follower for follower in bot_instance.get_followers(account, quantity, all)]
    while True:
        system(CLEAR_CONSOLE_COMMAND)
        for follower in followers:
            print(follower)
        print(f'\n{len(followers)} followers found on @{account}. Do you want to save this information into a file? (Y/N) ', end='')
        save = input()
        if save == 'Y' or save == 'y':
            while True:
                print('\nEnter the path for the file to be saved: ', end='')
                path = input()
                try:
                    with open(path, 'w') as file:
                        for follower in followers:
                            file.write(follower + '\n')
                except FileNotFoundError:
                    # Parent directory does not exist — re-prompt.
                    system(CLEAR_CONSOLE_COMMAND)
                    print('Invalid path')
                    continue
                break
            print(f'Information saved in {path}. Press anything to return to menu.', end='')
            input()
            break
        elif save == 'N' or save == 'n':
            print('\nNo information saved. Press anything to return to menu.', end='')
            input()
            break
        else:
            system(CLEAR_CONSOLE_COMMAND)
            print('Invalid option')
            continue
def search_follower(bot_instance: Bot) -> None:
    """Interactively check whether one account follows another.

    Prompts for the account whose follower list is searched and for the
    account to look for, reports the result, then waits for a key press.
    """
    system(CLEAR_CONSOLE_COMMAND)
    print('Enter the account to be searched: @', end='')
    search_account = input()
    print('Enter the account to be found: @', end='')
    account = input()
    system(CLEAR_CONSOLE_COMMAND)
    print(f'Searching for @{account} in @{search_account} list of followers')
    if bot_instance.search_follower(search_account, account):
        print(f'Found. @{account} is following @{search_account}. Press anything to return to menu.', end='')
    else:
        print(f'Not found. @{account} is not following @{search_account}. Press anything to return to menu.', end='')
    input()
def get_following(bot_instance: Bot) -> None:
    """Interactively list the users an account follows; optionally save.

    Prompts for a username and a count ("all" fetches everything), prints
    the results and offers to write them to a file.

    NOTE(review): shadows the builtin ``all`` and, later, the module-level
    ``path`` import — harmless within this function but worth renaming.
    """
    system(CLEAR_CONSOLE_COMMAND)
    all = False
    while True:
        print('Enter the instagram account username: @', end='')
        account = input()
        print('How many following users you want to get? (numbers only) (type "all" to get all following users) ', end='')
        quantity = input()
        try:
            quantity = int(quantity)
        except ValueError:
            # Non-numeric input is only accepted as the "all" keyword.
            if quantity == 'all' or quantity == 'All' or quantity == 'ALL':
                all = True
            else:
                system(CLEAR_CONSOLE_COMMAND)
                print('Invalid quantity\n')
                continue
        break
    system(CLEAR_CONSOLE_COMMAND)
    print(f'Searching for following users in @{account}')
    following = [user for user in bot_instance.get_following(account, quantity, all)]
    while True:
        system(CLEAR_CONSOLE_COMMAND)
        for user in following:
            print(user)
        print(f'\n{len(following)} following users found on @{account}. Do you want to save this information into a file? (Y/N) ', end='')
        save = input()
        if save == 'Y' or save == 'y':
            while True:
                print('\nEnter the path for the file to be saved: ', end='')
                path = input()
                try:
                    with open(path, 'w') as file:
                        for user in following:
                            file.write(user + '\n')
                except FileNotFoundError:
                    # Parent directory does not exist — re-prompt.
                    system(CLEAR_CONSOLE_COMMAND)
                    print('Invalid path')
                    continue
                break
            print(f'Information saved in {path}. Press anything to return to menu.', end='')
            input()
            break
        elif save == 'N' or save == 'n':
            print('\nNo information saved. Press anything to return to menu.', end='')
            input()
            break
        else:
            system(CLEAR_CONSOLE_COMMAND)
            print('Invalid option')
            continue
def search_following(bot_instance: Bot) -> None:
    """Interactively check whether one account is followed by another.

    Prompts for the account whose following list is searched and for the
    account to look for, reports the result, then waits for a key press.
    """
    system(CLEAR_CONSOLE_COMMAND)
    print('Enter the account to be searched: @', end='')
    search_account = input()
    print('Enter the account to be found: @', end='')
    account = input()
    system(CLEAR_CONSOLE_COMMAND)
    print(f'Searching for @{account} in @{search_account} list of following users')
    if bot_instance.search_following(search_account, account):
        print(f'Found. @{account} is followed by @{search_account}. Press anything to return to menu.', end='')
    else:
        print(f'Not found. @{account} is not followed by @{search_account}. Press anything to return to menu.', end='')
    input()
def search_not_followers(bot_instance: Bot) -> None:
    """List accounts you follow that do not follow you back.

    Fetches the list from the bot, prints it, and offers to save it to a
    file.

    NOTE(review): later shadows the module-level ``path`` import with the
    output file path — harmless here, but worth renaming.
    """
    system(CLEAR_CONSOLE_COMMAND)
    print('Searching for not followers')
    not_followers = bot_instance.search_not_followers()
    while True:
        system(CLEAR_CONSOLE_COMMAND)
        for user in not_followers:
            print(user)
        print(f'\n{len(not_followers)} not followers found on your account. Do you want to save this information into a file? (Y/N) ', end='')
        save = input()
        if save == 'Y' or save == 'y':
            while True:
                print('\nEnter the path for the file to be saved: ', end='')
                path = input()
                try:
                    with open(path, 'w') as file:
                        for not_follower in not_followers:
                            file.write(not_follower + '\n')
                except FileNotFoundError:
                    # Parent directory does not exist — re-prompt.
                    system(CLEAR_CONSOLE_COMMAND)
                    print('Invalid path')
                    continue
                break
            print(f'Information saved in {path}. Press anything to return to menu.', end='')
            input()
            break
        elif save == 'N' or save == 'n':
            print('\nNo information saved. Press anything to return to menu.', end='')
            input()
            break
        else:
            system(CLEAR_CONSOLE_COMMAND)
            print('Invalid option')
            continue
def unfollow_not_followers(bot_instace: Bot) -> None:
    """Unfollow every account that does not follow back, except the accounts
    the user explicitly chooses to keep.

    The parameter name ``bot_instace`` (sic) is preserved for backward
    compatibility with keyword callers.
    """

    def collect_ignored() -> list:
        """Interactively build the list of usernames to protect from
        unfollowing, either from a file (one user per line) or one by one."""
        while True:
            ignored = []
            print("Are there any accounts you don't want to unfollow? (Y/N) ", end='')
            answer = input()
            if answer in ('Y', 'y'):
                system(CLEAR_CONSOLE_COMMAND)
                while True:
                    print('[1] - Get accounts from a file (one user per line)\n[2] - Insert accounts one by one\n')
                    choice = input('>>> ')
                    if choice == '1':
                        system(CLEAR_CONSOLE_COMMAND)
                        while True:
                            print('Enter the path to the file (example/path/to/the/file): ', end='')
                            file_path = input()
                            if path.isfile(file_path):
                                with open(file_path, 'r') as file:
                                    for account in file.readlines():
                                        ignored.append(account.strip())
                                break
                            system(CLEAR_CONSOLE_COMMAND)
                            print('Invalid path\n')
                        break
                    elif choice == '2':
                        while True:
                            system(CLEAR_CONSOLE_COMMAND)
                            print(f'{len(ignored)} accounts to not follow\n')
                            print('Enter the account username (type "exit" to stop): ', end='')
                            username = input()
                            if username == 'exit':
                                break
                            ignored.append(username)
                        break
                    else:
                        system(CLEAR_CONSOLE_COMMAND)
                        print('Invalid option\n')
                return ignored
            elif answer in ('N', 'n'):
                return []
            else:
                system(CLEAR_CONSOLE_COMMAND)
                print('Invalid option')
                # Invalid answer: re-prompt from scratch (list resets).
                continue

    system(CLEAR_CONSOLE_COMMAND)
    ignore = collect_ignored()
    system(CLEAR_CONSOLE_COMMAND)
    print('Unfollowing not followers')
    unfollowed = bot_instace.unfollow_not_followers(ignore)
    system(CLEAR_CONSOLE_COMMAND)
    print(f'{unfollowed} not followers unfollowed. Press anything to return to menu.', end='')
    input()
def unfollow(bot_instace: Bot) -> None:
    """Unfollow a user-chosen number of followed accounts — or all of them —
    except the accounts the user explicitly chooses to keep.

    The parameter name ``bot_instace`` (sic) is preserved for backward
    compatibility with keyword callers.
    """

    def collect_ignored() -> list:
        """Interactively build the list of usernames to protect from
        unfollowing, either from a file (one user per line) or one by one."""
        while True:
            ignored = []
            print("Are there any accounts you don't want to unfollow? (Y/N) ", end='')
            answer = input()
            if answer in ('Y', 'y'):
                system(CLEAR_CONSOLE_COMMAND)
                while True:
                    print('[1] - Get accounts from a file (one user per line)\n[2] - Insert accounts one by one\n')
                    choice = input('>>> ')
                    if choice == '1':
                        system(CLEAR_CONSOLE_COMMAND)
                        while True:
                            print('Enter the path to the file (example/path/to/the/file): ', end='')
                            file_path = input()
                            if path.isfile(file_path):
                                with open(file_path, 'r') as file:
                                    for account in file.readlines():
                                        ignored.append(account.strip())
                                break
                            system(CLEAR_CONSOLE_COMMAND)
                            print('Invalid path\n')
                        break
                    elif choice == '2':
                        while True:
                            system(CLEAR_CONSOLE_COMMAND)
                            print(f'{len(ignored)} accounts to not follow\n')
                            print('Enter the account username (type "exit" to stop): ', end='')
                            username = input()
                            if username == 'exit':
                                break
                            ignored.append(username)
                        break
                    else:
                        system(CLEAR_CONSOLE_COMMAND)
                        print('Invalid option\n')
                return ignored
            elif answer in ('N', 'n'):
                return []
            else:
                system(CLEAR_CONSOLE_COMMAND)
                print('Invalid option')
                # Invalid answer: re-prompt from scratch (list resets).
                continue

    system(CLEAR_CONSOLE_COMMAND)
    # Renamed from `all`, which shadowed the builtin of the same name.
    unfollow_all = False
    while True:
        print('How many following users you want to unfollow? (numbers only) (type "all" to unfollow all users except those you specify) ', end='')
        quantity = input()
        try:
            quantity = int(quantity)
        except ValueError:
            # Case-insensitive check generalizes the original all/All/ALL
            # comparisons. When "all" is chosen, `quantity` stays a string,
            # matching the original call contract.
            if quantity.lower() == 'all':
                unfollow_all = True
            else:
                system(CLEAR_CONSOLE_COMMAND)
                print('Invalid quantity\n')
                continue
        break
    ignore = collect_ignored()
    system(CLEAR_CONSOLE_COMMAND)
    print('Unfollowing')
    unfollowed = bot_instace.unfollow(quantity, ignore, unfollow_all)
    system(CLEAR_CONSOLE_COMMAND)
    print(f'{unfollowed} unfollowed users. Press anything to return to menu.', end='')
    input()
| 35.309315
| 207
| 0.498034
| 2,016
| 20,091
| 4.843254
| 0.078373
| 0.079885
| 0.126485
| 0.163867
| 0.834904
| 0.807251
| 0.799775
| 0.782978
| 0.769562
| 0.731872
| 0
| 0.00127
| 0.412075
| 20,091
| 568
| 208
| 35.371479
| 0.825347
| 0
| 0
| 0.818386
| 0
| 0.024664
| 0.222488
| 0.006819
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024664
| false
| 0.008969
| 0.008969
| 0
| 0.035874
| 0.210762
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
52e2c41544b3b3fc6a8b85c1092c82b5d2630833
| 11,234
|
py
|
Python
|
test/trace_processor/python/api_unittest.py
|
jumeder/perfetto
|
df3ae5e6f975204d2f35aeed61cbbd0746151d8e
|
[
"Apache-2.0"
] | 1
|
2021-01-18T09:36:54.000Z
|
2021-01-18T09:36:54.000Z
|
test/trace_processor/python/api_unittest.py
|
jumeder/perfetto
|
df3ae5e6f975204d2f35aeed61cbbd0746151d8e
|
[
"Apache-2.0"
] | 8
|
2020-12-04T22:03:54.000Z
|
2021-11-08T01:29:31.000Z
|
test/trace_processor/python/api_unittest.py
|
jumeder/perfetto
|
df3ae5e6f975204d2f35aeed61cbbd0746151d8e
|
[
"Apache-2.0"
] | 3
|
2019-02-10T12:40:29.000Z
|
2022-01-24T09:16:29.000Z
|
#!/usr/bin/env python3
# Copyright (C) 2020 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from trace_processor.api import TraceProcessor, TraceProcessorException
from trace_processor.protos import ProtoFactory
class TestQueryResultIterator(unittest.TestCase):
    """Unit tests for TraceProcessor.QueryResultIterator batch decoding,
    both via direct iteration and via as_pandas_dataframe()."""

    # The numbers input into cells correspond to the CellType enum values
    # defined under trace_processor.proto
    CELL_VARINT = ProtoFactory().CellsBatch().CELL_VARINT
    CELL_STRING = ProtoFactory().CellsBatch().CELL_STRING
    CELL_INVALID = ProtoFactory().CellsBatch().CELL_INVALID

    # Cell-type layout shared by most tests: two (string, varint) column pairs.
    STR_INT_CELLS = [CELL_STRING, CELL_VARINT, CELL_STRING, CELL_VARINT]

    @staticmethod
    def _make_batch(cells, int_values=(), str_values=(), last=True):
        """Build a CellsBatch proto from cell types and column payloads.

        :param cells: sequence of CellType enum values describing the cells.
        :param int_values: varint payloads, in cell order.
        :param str_values: string payloads; encoded NUL-separated as the
            QueryResult wire format requires.
        :param last: whether this batch is flagged as the final one.
        """
        batch = ProtoFactory().CellsBatch()
        batch.cells.extend(cells)
        if int_values:
            batch.varint_cells.extend(int_values)
        if str_values:
            batch.string_cells = "\0".join(str_values) + "\0"
        batch.is_last_batch = last
        return batch

    def test_one_batch(self):
        int_values = [100, 200]
        str_values = ['bar1', 'bar2']
        batch = self._make_batch(self.STR_INT_CELLS, int_values, str_values)
        qr_iterator = TraceProcessor.QueryResultIterator(['foo_id', 'foo_num'],
                                                         [batch])
        for num, row in enumerate(qr_iterator):
            self.assertEqual(row.foo_id, str_values[num])
            self.assertEqual(row.foo_num, int_values[num])

    def test_many_batches(self):
        int_values = [100, 200, 300, 400]
        str_values = ['bar1', 'bar2', 'bar3', 'bar4']
        # Split the data across two batches; only the second is marked last.
        batch_1 = self._make_batch(
            self.STR_INT_CELLS, int_values[:2], str_values[:2], last=False)
        batch_2 = self._make_batch(
            self.STR_INT_CELLS, int_values[2:], str_values[2:], last=True)
        qr_iterator = TraceProcessor.QueryResultIterator(['foo_id', 'foo_num'],
                                                         [batch_1, batch_2])
        for num, row in enumerate(qr_iterator):
            self.assertEqual(row.foo_id, str_values[num])
            self.assertEqual(row.foo_num, int_values[num])

    def test_empty_batch(self):
        batch = self._make_batch([])
        qr_iterator = TraceProcessor.QueryResultIterator([], [batch])
        for num, row in enumerate(qr_iterator):
            self.assertIsNone(row.foo_id)
            self.assertIsNone(row.foo_num)

    def test_invalid_batch(self):
        batch = self._make_batch([], last=False)
        qr_iterator = TraceProcessor.QueryResultIterator([], [batch])
        # Since the batch isn't defined as the last batch, the QueryResultsIterator
        # expects another batch and thus raises IndexError as no next batch exists.
        with self.assertRaises(IndexError):
            for row in qr_iterator:
                pass

    def test_incorrect_cells_batch(self):
        str_values = ['bar1', 'bar2']
        batch = self._make_batch(self.STR_INT_CELLS, str_values=str_values)
        qr_iterator = TraceProcessor.QueryResultIterator(['foo_id', 'foo_num'],
                                                         [batch])
        # The batch specifies there ought to be 2 cells of type VARINT and 2 cells
        # of type STRING, but there are no varint cells defined in the batch. Thus
        # an IndexError occurs as it tries to access the empty varint cells list.
        with self.assertRaises(IndexError):
            for row in qr_iterator:
                pass

    def test_incorrect_columns_batch(self):
        batch = self._make_batch([self.CELL_VARINT, self.CELL_VARINT],
                                 [100, 200])
        qr_iterator = TraceProcessor.QueryResultIterator(
            ['foo_id', 'foo_num', 'foo_dur', 'foo_ms'], [batch])
        # It's always the case that the number of cells is a multiple of the number
        # of columns. However, here this is clearly not the case, so when the
        # iterator tries to access the cell for the third column, it raises an
        # IndexError due to having exhausted the cells list.
        with self.assertRaises(IndexError):
            for row in qr_iterator:
                pass

    def test_invalid_cell_type(self):
        batch = self._make_batch([self.CELL_INVALID, self.CELL_VARINT],
                                 [100, 200])
        qr_iterator = TraceProcessor.QueryResultIterator(['foo_id', 'foo_num'],
                                                         [batch])
        # In this batch we declare the columns types to be CELL_INVALID,
        # CELL_VARINT but that doesn't match the data which are both ints*
        # so we should raise a TraceProcessorException.
        with self.assertRaises(TraceProcessorException):
            for row in qr_iterator:
                pass

    def test_one_batch_as_pandas(self):
        int_values = [100, 200]
        str_values = ['bar1', 'bar2']
        batch = self._make_batch(self.STR_INT_CELLS, int_values, str_values)
        qr_iterator = TraceProcessor.QueryResultIterator(['foo_id', 'foo_num'],
                                                         [batch])
        qr_df = qr_iterator.as_pandas_dataframe()
        for num, row in qr_df.iterrows():
            self.assertEqual(row['foo_id'], str_values[num])
            self.assertEqual(row['foo_num'], int_values[num])

    def test_many_batches_as_pandas(self):
        int_values = [100, 200, 300, 400]
        str_values = ['bar1', 'bar2', 'bar3', 'bar4']
        batch_1 = self._make_batch(
            self.STR_INT_CELLS, int_values[:2], str_values[:2], last=False)
        batch_2 = self._make_batch(
            self.STR_INT_CELLS, int_values[2:], str_values[2:], last=True)
        qr_iterator = TraceProcessor.QueryResultIterator(['foo_id', 'foo_num'],
                                                         [batch_1, batch_2])
        qr_df = qr_iterator.as_pandas_dataframe()
        for num, row in qr_df.iterrows():
            self.assertEqual(row['foo_id'], str_values[num])
            self.assertEqual(row['foo_num'], int_values[num])

    def test_empty_batch_as_pandas(self):
        batch = self._make_batch([])
        qr_iterator = TraceProcessor.QueryResultIterator([], [batch])
        qr_df = qr_iterator.as_pandas_dataframe()
        # Fixed: the original looped over rows asserting against undefined
        # str_values/int_values (a latent NameError that never fired only
        # because the frame has no rows). An empty last batch with no columns
        # must simply yield an empty dataframe.
        self.assertTrue(qr_df.empty)

    def test_invalid_batch_as_pandas(self):
        batch = self._make_batch([], last=False)
        qr_iterator = TraceProcessor.QueryResultIterator([], [batch])
        # Since the batch isn't defined as the last batch, the QueryResultsIterator
        # expects another batch and thus raises IndexError as no next batch exists.
        with self.assertRaises(IndexError):
            qr_iterator.as_pandas_dataframe()

    def test_incorrect_cells_batch_as_pandas(self):
        str_values = ['bar1', 'bar2']
        batch = self._make_batch(self.STR_INT_CELLS, str_values=str_values)
        qr_iterator = TraceProcessor.QueryResultIterator(['foo_id', 'foo_num'],
                                                         [batch])
        # The batch specifies there ought to be 2 cells of type VARINT and 2 cells
        # of type STRING, but there are no varint cells defined in the batch. Thus
        # an IndexError occurs as it tries to access the empty varint cells list.
        with self.assertRaises(IndexError):
            qr_iterator.as_pandas_dataframe()

    def test_incorrect_columns_batch_as_pandas(self):
        batch = self._make_batch([self.CELL_VARINT, self.CELL_VARINT],
                                 [100, 200])
        qr_iterator = TraceProcessor.QueryResultIterator(
            ['foo_id', 'foo_num', 'foo_dur', 'foo_ms'], [batch])
        # It's always the case that the number of cells is a multiple of the number
        # of columns. However, here this is clearly not the case, so when the
        # iterator tries to access the cell for the third column, it raises an
        # IndexError due to having exhausted the cells list.
        with self.assertRaises(IndexError):
            qr_iterator.as_pandas_dataframe()

    def test_invalid_cell_type_as_pandas(self):
        batch = self._make_batch([self.CELL_INVALID, self.CELL_VARINT],
                                 [100, 200])
        qr_iterator = TraceProcessor.QueryResultIterator(['foo_id', 'foo_num'],
                                                         [batch])
        # In this batch we declare the columns types to be CELL_INVALID,
        # CELL_VARINT but that doesn't match the data which are both ints*
        # so we should raise a TraceProcessorException.
        with self.assertRaises(TraceProcessorException):
            qr_iterator.as_pandas_dataframe()
| 38.341297
| 80
| 0.692273
| 1,370
| 11,234
| 5.462774
| 0.140876
| 0.144308
| 0.097007
| 0.119722
| 0.876136
| 0.863709
| 0.863041
| 0.855024
| 0.849412
| 0.844869
| 0
| 0.015492
| 0.212836
| 11,234
| 292
| 81
| 38.472603
| 0.830827
| 0.205092
| 0
| 0.883838
| 0
| 0
| 0.032733
| 0
| 0
| 0
| 0
| 0
| 0.10101
| 1
| 0.070707
| false
| 0.020202
| 0.015152
| 0
| 0.106061
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
eac51a0c4c0cf25512b41069a1a81d7bc1a8b83f
| 23,105
|
py
|
Python
|
sdk/python/pulumi_vault/aws/auth_backend_client.py
|
pulumi/pulumi-vault
|
1682875f4a5d7d508f36e166529ad2b8aec34090
|
[
"ECL-2.0",
"Apache-2.0"
] | 10
|
2019-10-07T17:44:18.000Z
|
2022-03-30T20:46:33.000Z
|
sdk/python/pulumi_vault/aws/auth_backend_client.py
|
pulumi/pulumi-vault
|
1682875f4a5d7d508f36e166529ad2b8aec34090
|
[
"ECL-2.0",
"Apache-2.0"
] | 79
|
2019-10-11T18:13:07.000Z
|
2022-03-31T21:09:41.000Z
|
sdk/python/pulumi_vault/aws/auth_backend_client.py
|
pulumi/pulumi-vault
|
1682875f4a5d7d508f36e166529ad2b8aec34090
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2019-10-28T10:08:40.000Z
|
2020-03-17T14:20:55.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['AuthBackendClientArgs', 'AuthBackendClient']
@pulumi.input_type
class AuthBackendClientArgs:
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen) — regenerate
    # rather than hand-editing. @pulumi.input_type derives pulumi input
    # properties from the annotated constructor keyword arguments.
    def __init__(__self__, *,
                 access_key: Optional[pulumi.Input[str]] = None,
                 backend: Optional[pulumi.Input[str]] = None,
                 ec2_endpoint: Optional[pulumi.Input[str]] = None,
                 iam_endpoint: Optional[pulumi.Input[str]] = None,
                 iam_server_id_header_value: Optional[pulumi.Input[str]] = None,
                 secret_key: Optional[pulumi.Input[str]] = None,
                 sts_endpoint: Optional[pulumi.Input[str]] = None,
                 sts_region: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a AuthBackendClient resource.
        :param pulumi.Input[str] access_key: The AWS access key that Vault should use for the
               auth backend.
        :param pulumi.Input[str] backend: The path the AWS auth backend being configured was
               mounted at. Defaults to `aws`.
        :param pulumi.Input[str] ec2_endpoint: Override the URL Vault uses when making EC2 API
               calls.
        :param pulumi.Input[str] iam_endpoint: Override the URL Vault uses when making IAM API
               calls.
        :param pulumi.Input[str] iam_server_id_header_value: The value to require in the
               `X-Vault-AWS-IAM-Server-ID` header as part of `GetCallerIdentity` requests
               that are used in the IAM auth method.
        :param pulumi.Input[str] secret_key: The AWS secret key that Vault should use for the
               auth backend.
        :param pulumi.Input[str] sts_endpoint: Override the URL Vault uses when making STS API
               calls.
        :param pulumi.Input[str] sts_region: Override the default region when making STS API
               calls. The `sts_endpoint` argument must be set when using `sts_region`.
        """
        # Only record explicitly-provided values; unset inputs stay absent so
        # the provider applies its own defaults.
        if access_key is not None:
            pulumi.set(__self__, "access_key", access_key)
        if backend is not None:
            pulumi.set(__self__, "backend", backend)
        if ec2_endpoint is not None:
            pulumi.set(__self__, "ec2_endpoint", ec2_endpoint)
        if iam_endpoint is not None:
            pulumi.set(__self__, "iam_endpoint", iam_endpoint)
        if iam_server_id_header_value is not None:
            pulumi.set(__self__, "iam_server_id_header_value", iam_server_id_header_value)
        if secret_key is not None:
            pulumi.set(__self__, "secret_key", secret_key)
        if sts_endpoint is not None:
            pulumi.set(__self__, "sts_endpoint", sts_endpoint)
        if sts_region is not None:
            pulumi.set(__self__, "sts_region", sts_region)

    @property
    @pulumi.getter(name="accessKey")
    def access_key(self) -> Optional[pulumi.Input[str]]:
        """
        The AWS access key that Vault should use for the
        auth backend.
        """
        return pulumi.get(self, "access_key")

    @access_key.setter
    def access_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "access_key", value)

    @property
    @pulumi.getter
    def backend(self) -> Optional[pulumi.Input[str]]:
        """
        The path the AWS auth backend being configured was
        mounted at. Defaults to `aws`.
        """
        return pulumi.get(self, "backend")

    @backend.setter
    def backend(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "backend", value)

    @property
    @pulumi.getter(name="ec2Endpoint")
    def ec2_endpoint(self) -> Optional[pulumi.Input[str]]:
        """
        Override the URL Vault uses when making EC2 API
        calls.
        """
        return pulumi.get(self, "ec2_endpoint")

    @ec2_endpoint.setter
    def ec2_endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ec2_endpoint", value)

    @property
    @pulumi.getter(name="iamEndpoint")
    def iam_endpoint(self) -> Optional[pulumi.Input[str]]:
        """
        Override the URL Vault uses when making IAM API
        calls.
        """
        return pulumi.get(self, "iam_endpoint")

    @iam_endpoint.setter
    def iam_endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "iam_endpoint", value)

    @property
    @pulumi.getter(name="iamServerIdHeaderValue")
    def iam_server_id_header_value(self) -> Optional[pulumi.Input[str]]:
        """
        The value to require in the
        `X-Vault-AWS-IAM-Server-ID` header as part of `GetCallerIdentity` requests
        that are used in the IAM auth method.
        """
        return pulumi.get(self, "iam_server_id_header_value")

    @iam_server_id_header_value.setter
    def iam_server_id_header_value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "iam_server_id_header_value", value)

    @property
    @pulumi.getter(name="secretKey")
    def secret_key(self) -> Optional[pulumi.Input[str]]:
        """
        The AWS secret key that Vault should use for the
        auth backend.
        """
        return pulumi.get(self, "secret_key")

    @secret_key.setter
    def secret_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "secret_key", value)

    @property
    @pulumi.getter(name="stsEndpoint")
    def sts_endpoint(self) -> Optional[pulumi.Input[str]]:
        """
        Override the URL Vault uses when making STS API
        calls.
        """
        return pulumi.get(self, "sts_endpoint")

    @sts_endpoint.setter
    def sts_endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sts_endpoint", value)

    @property
    @pulumi.getter(name="stsRegion")
    def sts_region(self) -> Optional[pulumi.Input[str]]:
        """
        Override the default region when making STS API
        calls. The `sts_endpoint` argument must be set when using `sts_region`.
        """
        return pulumi.get(self, "sts_region")

    @sts_region.setter
    def sts_region(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sts_region", value)
@pulumi.input_type
class _AuthBackendClientState:
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen) — regenerate
    # rather than hand-editing. Mirrors AuthBackendClientArgs but is used for
    # state lookup/filtering rather than resource construction.
    def __init__(__self__, *,
                 access_key: Optional[pulumi.Input[str]] = None,
                 backend: Optional[pulumi.Input[str]] = None,
                 ec2_endpoint: Optional[pulumi.Input[str]] = None,
                 iam_endpoint: Optional[pulumi.Input[str]] = None,
                 iam_server_id_header_value: Optional[pulumi.Input[str]] = None,
                 secret_key: Optional[pulumi.Input[str]] = None,
                 sts_endpoint: Optional[pulumi.Input[str]] = None,
                 sts_region: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering AuthBackendClient resources.
        :param pulumi.Input[str] access_key: The AWS access key that Vault should use for the
               auth backend.
        :param pulumi.Input[str] backend: The path the AWS auth backend being configured was
               mounted at. Defaults to `aws`.
        :param pulumi.Input[str] ec2_endpoint: Override the URL Vault uses when making EC2 API
               calls.
        :param pulumi.Input[str] iam_endpoint: Override the URL Vault uses when making IAM API
               calls.
        :param pulumi.Input[str] iam_server_id_header_value: The value to require in the
               `X-Vault-AWS-IAM-Server-ID` header as part of `GetCallerIdentity` requests
               that are used in the IAM auth method.
        :param pulumi.Input[str] secret_key: The AWS secret key that Vault should use for the
               auth backend.
        :param pulumi.Input[str] sts_endpoint: Override the URL Vault uses when making STS API
               calls.
        :param pulumi.Input[str] sts_region: Override the default region when making STS API
               calls. The `sts_endpoint` argument must be set when using `sts_region`.
        """
        # Only record explicitly-provided values; unset inputs stay absent so
        # the provider applies its own defaults.
        if access_key is not None:
            pulumi.set(__self__, "access_key", access_key)
        if backend is not None:
            pulumi.set(__self__, "backend", backend)
        if ec2_endpoint is not None:
            pulumi.set(__self__, "ec2_endpoint", ec2_endpoint)
        if iam_endpoint is not None:
            pulumi.set(__self__, "iam_endpoint", iam_endpoint)
        if iam_server_id_header_value is not None:
            pulumi.set(__self__, "iam_server_id_header_value", iam_server_id_header_value)
        if secret_key is not None:
            pulumi.set(__self__, "secret_key", secret_key)
        if sts_endpoint is not None:
            pulumi.set(__self__, "sts_endpoint", sts_endpoint)
        if sts_region is not None:
            pulumi.set(__self__, "sts_region", sts_region)

    @property
    @pulumi.getter(name="accessKey")
    def access_key(self) -> Optional[pulumi.Input[str]]:
        """
        The AWS access key that Vault should use for the
        auth backend.
        """
        return pulumi.get(self, "access_key")

    @access_key.setter
    def access_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "access_key", value)

    @property
    @pulumi.getter
    def backend(self) -> Optional[pulumi.Input[str]]:
        """
        The path the AWS auth backend being configured was
        mounted at. Defaults to `aws`.
        """
        return pulumi.get(self, "backend")

    @backend.setter
    def backend(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "backend", value)

    @property
    @pulumi.getter(name="ec2Endpoint")
    def ec2_endpoint(self) -> Optional[pulumi.Input[str]]:
        """
        Override the URL Vault uses when making EC2 API
        calls.
        """
        return pulumi.get(self, "ec2_endpoint")

    @ec2_endpoint.setter
    def ec2_endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ec2_endpoint", value)

    @property
    @pulumi.getter(name="iamEndpoint")
    def iam_endpoint(self) -> Optional[pulumi.Input[str]]:
        """
        Override the URL Vault uses when making IAM API
        calls.
        """
        return pulumi.get(self, "iam_endpoint")

    @iam_endpoint.setter
    def iam_endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "iam_endpoint", value)

    @property
    @pulumi.getter(name="iamServerIdHeaderValue")
    def iam_server_id_header_value(self) -> Optional[pulumi.Input[str]]:
        """
        The value to require in the
        `X-Vault-AWS-IAM-Server-ID` header as part of `GetCallerIdentity` requests
        that are used in the IAM auth method.
        """
        return pulumi.get(self, "iam_server_id_header_value")

    @iam_server_id_header_value.setter
    def iam_server_id_header_value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "iam_server_id_header_value", value)

    @property
    @pulumi.getter(name="secretKey")
    def secret_key(self) -> Optional[pulumi.Input[str]]:
        """
        The AWS secret key that Vault should use for the
        auth backend.
        """
        return pulumi.get(self, "secret_key")

    @secret_key.setter
    def secret_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "secret_key", value)

    @property
    @pulumi.getter(name="stsEndpoint")
    def sts_endpoint(self) -> Optional[pulumi.Input[str]]:
        """
        Override the URL Vault uses when making STS API
        calls.
        """
        return pulumi.get(self, "sts_endpoint")

    @sts_endpoint.setter
    def sts_endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sts_endpoint", value)

    @property
    @pulumi.getter(name="stsRegion")
    def sts_region(self) -> Optional[pulumi.Input[str]]:
        """
        Override the default region when making STS API
        calls. The `sts_endpoint` argument must be set when using `sts_region`.
        """
        return pulumi.get(self, "sts_region")

    @sts_region.setter
    def sts_region(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sts_region", value)
class AuthBackendClient(pulumi.CustomResource):
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen) — regenerate
    # rather than hand-editing. Manages the AWS-auth client configuration of a
    # Vault AWS auth backend.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 access_key: Optional[pulumi.Input[str]] = None,
                 backend: Optional[pulumi.Input[str]] = None,
                 ec2_endpoint: Optional[pulumi.Input[str]] = None,
                 iam_endpoint: Optional[pulumi.Input[str]] = None,
                 iam_server_id_header_value: Optional[pulumi.Input[str]] = None,
                 secret_key: Optional[pulumi.Input[str]] = None,
                 sts_endpoint: Optional[pulumi.Input[str]] = None,
                 sts_region: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        ## Import

        AWS auth backend clients can be imported using `auth/`, the `backend` path, and `/config/client` e.g.

        ```sh
         $ pulumi import vault:aws/authBackendClient:AuthBackendClient example auth/aws/config/client
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] access_key: The AWS access key that Vault should use for the
               auth backend.
        :param pulumi.Input[str] backend: The path the AWS auth backend being configured was
               mounted at. Defaults to `aws`.
        :param pulumi.Input[str] ec2_endpoint: Override the URL Vault uses when making EC2 API
               calls.
        :param pulumi.Input[str] iam_endpoint: Override the URL Vault uses when making IAM API
               calls.
        :param pulumi.Input[str] iam_server_id_header_value: The value to require in the
               `X-Vault-AWS-IAM-Server-ID` header as part of `GetCallerIdentity` requests
               that are used in the IAM auth method.
        :param pulumi.Input[str] secret_key: The AWS secret key that Vault should use for the
               auth backend.
        :param pulumi.Input[str] sts_endpoint: Override the URL Vault uses when making STS API
               calls.
        :param pulumi.Input[str] sts_region: Override the default region when making STS API
               calls. The `sts_endpoint` argument must be set when using `sts_region`.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: Optional[AuthBackendClientArgs] = None,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        ## Import

        AWS auth backend clients can be imported using `auth/`, the `backend` path, and `/config/client` e.g.

        ```sh
         $ pulumi import vault:aws/authBackendClient:AuthBackendClient example auth/aws/config/client
        ```

        :param str resource_name: The name of the resource.
        :param AuthBackendClientArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: either a single
        # AuthBackendClientArgs object or individual keyword properties.
        resource_args, opts = _utilities.get_resource_args_opts(AuthBackendClientArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       access_key: Optional[pulumi.Input[str]] = None,
                       backend: Optional[pulumi.Input[str]] = None,
                       ec2_endpoint: Optional[pulumi.Input[str]] = None,
                       iam_endpoint: Optional[pulumi.Input[str]] = None,
                       iam_server_id_header_value: Optional[pulumi.Input[str]] = None,
                       secret_key: Optional[pulumi.Input[str]] = None,
                       sts_endpoint: Optional[pulumi.Input[str]] = None,
                       sts_region: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Shared implementation behind both __init__ overloads: validates
        # options, builds the props bag, and registers the resource.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (no existing id): __props__ must not be
            # supplied by the caller in this path.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = AuthBackendClientArgs.__new__(AuthBackendClientArgs)

            __props__.__dict__["access_key"] = access_key
            __props__.__dict__["backend"] = backend
            __props__.__dict__["ec2_endpoint"] = ec2_endpoint
            __props__.__dict__["iam_endpoint"] = iam_endpoint
            __props__.__dict__["iam_server_id_header_value"] = iam_server_id_header_value
            __props__.__dict__["secret_key"] = secret_key
            __props__.__dict__["sts_endpoint"] = sts_endpoint
            __props__.__dict__["sts_region"] = sts_region
        super(AuthBackendClient, __self__).__init__(
            'vault:aws/authBackendClient:AuthBackendClient',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            access_key: Optional[pulumi.Input[str]] = None,
            backend: Optional[pulumi.Input[str]] = None,
            ec2_endpoint: Optional[pulumi.Input[str]] = None,
            iam_endpoint: Optional[pulumi.Input[str]] = None,
            iam_server_id_header_value: Optional[pulumi.Input[str]] = None,
            secret_key: Optional[pulumi.Input[str]] = None,
            sts_endpoint: Optional[pulumi.Input[str]] = None,
            sts_region: Optional[pulumi.Input[str]] = None) -> 'AuthBackendClient':
        """
        Get an existing AuthBackendClient resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] access_key: The AWS access key that Vault should use for the
               auth backend.
        :param pulumi.Input[str] backend: The path the AWS auth backend being configured was
               mounted at. Defaults to `aws`.
        :param pulumi.Input[str] ec2_endpoint: Override the URL Vault uses when making EC2 API
               calls.
        :param pulumi.Input[str] iam_endpoint: Override the URL Vault uses when making IAM API
               calls.
        :param pulumi.Input[str] iam_server_id_header_value: The value to require in the
               `X-Vault-AWS-IAM-Server-ID` header as part of `GetCallerIdentity` requests
               that are used in the IAM auth method.
        :param pulumi.Input[str] secret_key: The AWS secret key that Vault should use for the
               auth backend.
        :param pulumi.Input[str] sts_endpoint: Override the URL Vault uses when making STS API
               calls.
        :param pulumi.Input[str] sts_region: Override the default region when making STS API
               calls. The `sts_endpoint` argument must be set when using `sts_region`.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _AuthBackendClientState.__new__(_AuthBackendClientState)

        __props__.__dict__["access_key"] = access_key
        __props__.__dict__["backend"] = backend
        __props__.__dict__["ec2_endpoint"] = ec2_endpoint
        __props__.__dict__["iam_endpoint"] = iam_endpoint
        __props__.__dict__["iam_server_id_header_value"] = iam_server_id_header_value
        __props__.__dict__["secret_key"] = secret_key
        __props__.__dict__["sts_endpoint"] = sts_endpoint
        __props__.__dict__["sts_region"] = sts_region
        return AuthBackendClient(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="accessKey")
    def access_key(self) -> pulumi.Output[Optional[str]]:
        """
        The AWS access key that Vault should use for the
        auth backend.
        """
        return pulumi.get(self, "access_key")

    @property
    @pulumi.getter
    def backend(self) -> pulumi.Output[Optional[str]]:
        """
        The path the AWS auth backend being configured was
        mounted at. Defaults to `aws`.
        """
        return pulumi.get(self, "backend")

    @property
    @pulumi.getter(name="ec2Endpoint")
    def ec2_endpoint(self) -> pulumi.Output[Optional[str]]:
        """
        Override the URL Vault uses when making EC2 API
        calls.
        """
        return pulumi.get(self, "ec2_endpoint")

    @property
    @pulumi.getter(name="iamEndpoint")
    def iam_endpoint(self) -> pulumi.Output[Optional[str]]:
        """
        Override the URL Vault uses when making IAM API
        calls.
        """
        return pulumi.get(self, "iam_endpoint")

    @property
    @pulumi.getter(name="iamServerIdHeaderValue")
    def iam_server_id_header_value(self) -> pulumi.Output[Optional[str]]:
        """
        The value to require in the
        `X-Vault-AWS-IAM-Server-ID` header as part of `GetCallerIdentity` requests
        that are used in the IAM auth method.
        """
        return pulumi.get(self, "iam_server_id_header_value")

    @property
    @pulumi.getter(name="secretKey")
    def secret_key(self) -> pulumi.Output[Optional[str]]:
        """
        The AWS secret key that Vault should use for the
        auth backend.
        """
        return pulumi.get(self, "secret_key")

    @property
    @pulumi.getter(name="stsEndpoint")
    def sts_endpoint(self) -> pulumi.Output[Optional[str]]:
        """
        Override the URL Vault uses when making STS API
        calls.
        """
        return pulumi.get(self, "sts_endpoint")

    @property
    @pulumi.getter(name="stsRegion")
    def sts_region(self) -> pulumi.Output[Optional[str]]:
        """
        Override the default region when making STS API
        calls. The `sts_endpoint` argument must be set when using `sts_region`.
        """
        return pulumi.get(self, "sts_region")
| 41.856884
| 134
| 0.634235
| 2,830
| 23,105
| 4.944523
| 0.061484
| 0.0849
| 0.106053
| 0.113199
| 0.880798
| 0.870864
| 0.865576
| 0.858501
| 0.851997
| 0.836204
| 0
| 0.002485
| 0.268513
| 23,105
| 551
| 135
| 41.932849
| 0.825454
| 0.320147
| 0
| 0.83391
| 1
| 0
| 0.098579
| 0.025882
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16263
| false
| 0.00346
| 0.017301
| 0
| 0.276817
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
eac974d88a94a374d3a16673bf0be5690fccfc38
| 1,050
|
py
|
Python
|
dt_stripe/managers.py
|
itsnamgyu/api-demo
|
ddf726928bd7f1021143c4dbb530e3017a3edda9
|
[
"MIT"
] | 1
|
2019-06-02T08:20:38.000Z
|
2019-06-02T08:20:38.000Z
|
dt_stripe/managers.py
|
itsnamgyu/api-demo
|
ddf726928bd7f1021143c4dbb530e3017a3edda9
|
[
"MIT"
] | 12
|
2019-07-21T18:40:35.000Z
|
2019-10-05T19:57:49.000Z
|
dt_stripe/managers.py
|
itsnamgyu/django-template
|
20f64974e0dda69cf8dcf0dac9e0a309f200fb61
|
[
"MIT"
] | null | null | null |
from django.apps import apps
from django.db import models
class ServiceManager(models.Manager):
    """Manager limited to products whose ``product_type`` is ``"service"``."""

    def get_queryset(self):
        # Narrow the default queryset to service products only.
        base = super().get_queryset()
        return base.filter(product_type="service")
class GoodManager(models.Manager):
    """Manager limited to products whose ``product_type`` is ``"good"``."""

    def get_queryset(self):
        # Narrow the default queryset to good (physical product) rows only.
        base = super().get_queryset()
        return base.filter(product_type="good")
class PlanManager(models.Manager):
    """Manager for plan objects, with subscription-aware filters."""

    def get_queryset(self):
        return super().get_queryset()

    def _active_subscriptions(self, customer):
        # Resolve the model lazily via the app registry, matching the
        # original code's way of avoiding a circular import.
        Subscription = apps.get_model("dt_stripe", "Subscription")
        return Subscription.objects.filter(
            customer=customer, status=Subscription.SUBSCRIPTION_ACTIVE
        )

    def subscribed_by(self, customer):
        """Plans the given customer holds an active subscription to."""
        return self.get_queryset().filter(
            subscriptions__in=self._active_subscriptions(customer)
        )

    def not_subscribed_by(self, customer):
        """Plans the given customer has no active subscription to."""
        return self.get_queryset().exclude(
            subscriptions__in=self._active_subscriptions(customer)
        )
| 30.882353
| 74
| 0.67619
| 111
| 1,050
| 6.189189
| 0.324324
| 0.128093
| 0.069869
| 0.082969
| 0.80786
| 0.80786
| 0.80786
| 0.80786
| 0.80786
| 0.80786
| 0
| 0
| 0.222857
| 1,050
| 33
| 75
| 31.818182
| 0.841912
| 0
| 0
| 0.36
| 0
| 0
| 0.050476
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.08
| 0.12
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
d8134620d1d4e2ae84dd1fde2b57220b82314f3a
| 120
|
py
|
Python
|
discord/state.py
|
kuzaku-developers/disnake
|
61cc1ad4c2bafd39726a1447c85f7e469e41af10
|
[
"MIT"
] | null | null | null |
discord/state.py
|
kuzaku-developers/disnake
|
61cc1ad4c2bafd39726a1447c85f7e469e41af10
|
[
"MIT"
] | null | null | null |
discord/state.py
|
kuzaku-developers/disnake
|
61cc1ad4c2bafd39726a1447c85f7e469e41af10
|
[
"MIT"
] | null | null | null |
# Compatibility shim: re-export disnake.state under the ``discord.state``
# module path so code importing the old name keeps working.
from disnake.state import *
# ``import *`` skips underscore-prefixed names; grabbing the source
# module's ``__dict__`` lets the next line copy those over as well.
from disnake.state import __dict__ as __original_dict__
# At module level ``locals()`` is this module's namespace, so this merges
# every attribute of disnake.state (including private ones) into it.
locals().update(__original_dict__)
| 24
| 55
| 0.833333
| 16
| 120
| 5.375
| 0.5625
| 0.255814
| 0.372093
| 0.511628
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 120
| 4
| 56
| 30
| 0.796296
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
dc5c8d18d6f732ec5d82ba0578235665dd684ccd
| 25,337
|
py
|
Python
|
util/data/gen/CRYPT32.dll.py
|
56kyle/bloons_auto
|
419d55b51d1cddc49099593970adf1c67985b389
|
[
"MIT"
] | null | null | null |
util/data/gen/CRYPT32.dll.py
|
56kyle/bloons_auto
|
419d55b51d1cddc49099593970adf1c67985b389
|
[
"MIT"
] | null | null | null |
util/data/gen/CRYPT32.dll.py
|
56kyle/bloons_auto
|
419d55b51d1cddc49099593970adf1c67985b389
|
[
"MIT"
] | null | null | null |
# Generated dump for CRYPT32.dll: no standalone symbols were captured,
# only the exports table below.
symbols = []
exports = [{'type': 'function', 'name': 'CertAddCRLContextToStore', 'address': '0x7ffb3be3ff70'}, {'type': 'function', 'name': 'CertAddCRLLinkToStore', 'address': '0x7ffb3be3fff0'}, {'type': 'function', 'name': 'CertAddCTLContextToStore', 'address': '0x7ffb3be3ff70'}, {'type': 'function', 'name': 'CertAddCTLLinkToStore', 'address': '0x7ffb3be3fff0'}, {'type': 'function', 'name': 'CertAddCertificateContextToStore', 'address': '0x7ffb3bdf2610'}, {'type': 'function', 'name': 'CertAddCertificateLinkToStore', 'address': '0x7ffb3be3fff0'}, {'type': 'function', 'name': 'CertAddEncodedCRLToStore', 'address': '0x7ffb3be0a730'}, {'type': 'function', 'name': 'CertAddEncodedCTLToStore', 'address': '0x7ffb3bdc9830'}, {'type': 'function', 'name': 'CertAddEncodedCertificateToStore', 'address': '0x7ffb3bddad60'}, {'type': 'function', 'name': 'CertAddEncodedCertificateToSystemStoreA', 'address': '0x7ffb3be41060'}, {'type': 'function', 'name': 'CertAddEncodedCertificateToSystemStoreW', 'address': '0x7ffb3be410e0'}, {'type': 'function', 'name': 'CertAddEnhancedKeyUsageIdentifier', 'address': '0x7ffb3be41800'}, {'type': 'function', 'name': 'CertAddRefServerOcspResponse', 'address': '0x7ffb3be431e0'}, {'type': 'function', 'name': 'CertAddRefServerOcspResponseContext', 'address': '0x7ffb3be43200'}, {'type': 'function', 'name': 'CertAddSerializedElementToStore', 'address': '0x7ffb3bdd1430'}, {'type': 'function', 'name': 'CertAddStoreToCollection', 'address': '0x7ffb3bdf1830'}, {'type': 'function', 'name': 'CertAlgIdToOID', 'address': '0x7ffb3be43a10'}, {'type': 'function', 'name': 'CertCloseServerOcspResponse', 'address': '0x7ffb3be43220'}, {'type': 'function', 'name': 'CertCloseStore', 'address': '0x7ffb3bddc9a0'}, {'type': 'function', 'name': 'CertCompareCertificate', 'address': '0x7ffb3be08340'}, {'type': 'function', 'name': 'CertCompareCertificateName', 'address': '0x7ffb3bdf2b40'}, {'type': 'function', 'name': 'CertCompareIntegerBlob', 'address': '0x7ffb3be43a50'}, {'type': 
'function', 'name': 'CertComparePublicKeyInfo', 'address': '0x7ffb3bde2a90'}, {'type': 'function', 'name': 'CertControlStore', 'address': '0x7ffb3bdcd6c0'}, {'type': 'function', 'name': 'CertCreateCRLContext', 'address': '0x7ffb3be0a6f0'}, {'type': 'function', 'name': 'CertCreateCTLContext', 'address': '0x7ffb3bdc97f0'}, {'type': 'function', 'name': 'CertCreateCTLEntryFromCertificateContextProperties', 'address': '0x7ffb3be40050'}, {'type': 'function', 'name': 'CertCreateCertificateChainEngine', 'address': '0x7ffb3be90830'}, {'type': 'function', 'name': 'CertCreateCertificateContext', 'address': '0x7ffb3be06c40'}, {'type': 'function', 'name': 'CertCreateContext', 'address': '0x7ffb3bdced90'}, {'type': 'function', 'name': 'CertCreateSelfSignCertificate', 'address': '0x7ffb3be44020'}, {'type': 'function', 'name': 'CertDeleteCRLFromStore', 'address': '0x7ffb3be40590'}, {'type': 'function', 'name': 'CertDeleteCTLFromStore', 'address': '0x7ffb3be40590'}, {'type': 'function', 'name': 'CertDeleteCertificateFromStore', 'address': '0x7ffb3be40590'}, {'type': 'function', 'name': 'CertDuplicateCRLContext', 'address': '0x7ffb3bdf9eb0'}, {'type': 'function', 'name': 'CertDuplicateCTLContext', 'address': '0x7ffb3bdf9eb0'}, {'type': 'function', 'name': 'CertDuplicateCertificateChain', 'address': '0x7ffb3be0b0c0'}, {'type': 'function', 'name': 'CertDuplicateCertificateContext', 'address': '0x7ffb3bdd2b90'}, {'type': 'function', 'name': 'CertDuplicateStore', 'address': '0x7ffb3be0afe0'}, {'type': 'function', 'name': 'CertEnumCRLContextProperties', 'address': '0x7ffb3be405b0'}, {'type': 'function', 'name': 'CertEnumCRLsInStore', 'address': '0x7ffb3bdc65c0'}, {'type': 'function', 'name': 'CertEnumCTLContextProperties', 'address': '0x7ffb3be405b0'}, {'type': 'function', 'name': 'CertEnumCTLsInStore', 'address': '0x7ffb3bdf4380'}, {'type': 'function', 'name': 'CertEnumCertificateContextProperties', 'address': '0x7ffb3be405b0'}, {'type': 'function', 'name': 
'CertEnumCertificatesInStore', 'address': '0x7ffb3bdce1f0'}, {'type': 'function', 'name': 'CertEnumPhysicalStore', 'address': '0x7ffb3be48d50'}, {'type': 'function', 'name': 'CertEnumSubjectInSortedCTL', 'address': '0x7ffb3be405d0'}, {'type': 'function', 'name': 'CertEnumSystemStore', 'address': '0x7ffb3be48d70'}, {'type': 'function', 'name': 'CertEnumSystemStoreLocation', 'address': '0x7ffb3be49100'}, {'type': 'function', 'name': 'CertFindAttribute', 'address': '0x7ffb3bdf8af0'}, {'type': 'function', 'name': 'CertFindCRLInStore', 'address': '0x7ffb3be06d20'}, {'type': 'function', 'name': 'CertFindCTLInStore', 'address': '0x7ffb3be40740'}, {'type': 'function', 'name': 'CertFindCertificateInCRL', 'address': '0x7ffb3be0bec0'}, {'type': 'function', 'name': 'CertFindCertificateInStore', 'address': '0x7ffb3bdcf140'}, {'type': 'function', 'name': 'CertFindChainInStore', 'address': '0x7ffb3be49e80'}, {'type': 'function', 'name': 'CertFindExtension', 'address': '0x7ffb3bdf39d0'}, {'type': 'function', 'name': 'CertFindRDNAttr', 'address': '0x7ffb3be43a70'}, {'type': 'function', 'name': 'CertFindSubjectInCTL', 'address': '0x7ffb3bdf9650'}, {'type': 'function', 'name': 'CertFindSubjectInSortedCTL', 'address': '0x7ffb3bdecb30'}, {'type': 'function', 'name': 'CertFreeCRLContext', 'address': '0x7ffb3be0b740'}, {'type': 'function', 'name': 'CertFreeCTLContext', 'address': '0x7ffb3be0b740'}, {'type': 'function', 'name': 'CertFreeCertificateChain', 'address': '0x7ffb3bdf8ef0'}, {'type': 'function', 'name': 'CertFreeCertificateChainEngine', 'address': '0x7ffb3be90840'}, {'type': 'function', 'name': 'CertFreeCertificateChainList', 'address': '0x7ffb3be4a250'}, {'type': 'function', 'name': 'CertFreeCertificateContext', 'address': '0x7ffb3bdd9260'}, {'type': 'function', 'name': 'CertFreeServerOcspResponseContext', 'address': '0x7ffb3be43290'}, {'type': 'function', 'name': 'CertGetCRLContextProperty', 'address': '0x7ffb3bde09d0'}, {'type': 'function', 'name': 'CertGetCRLFromStore', 
'address': '0x7ffb3be407a0'}, {'type': 'function', 'name': 'CertGetCTLContextProperty', 'address': '0x7ffb3bde09d0'}, {'type': 'function', 'name': 'CertGetCertificateChain', 'address': '0x7ffb3bdd4c80'}, {'type': 'function', 'name': 'CertGetCertificateContextProperty', 'address': '0x7ffb3bde09d0'}, {'type': 'function', 'name': 'CertGetEnhancedKeyUsage', 'address': '0x7ffb3bdf3d40'}, {'type': 'function', 'name': 'CertGetIntendedKeyUsage', 'address': '0x7ffb3bdf43e0'}, {'type': 'function', 'name': 'CertGetIssuerCertificateFromStore', 'address': '0x7ffb3be40890'}, {'type': 'function', 'name': 'CertGetNameStringA', 'address': '0x7ffb3be4ae00'}, {'type': 'function', 'name': 'CertGetNameStringW', 'address': '0x7ffb3bdf2d50'}, {'type': 'function', 'name': 'CertGetPublicKeyLength', 'address': '0x7ffb3bdf4c50'}, {'type': 'function', 'name': 'CertGetServerOcspResponseContext', 'address': '0x7ffb3be432e0'}, {'type': 'function', 'name': 'CertGetStoreProperty', 'address': '0x7ffb3be40990'}, {'type': 'function', 'name': 'CertGetSubjectCertificateFromStore', 'address': '0x7ffb3be0aaa0'}, {'type': 'function', 'name': 'CertGetValidUsages', 'address': '0x7ffb3bdf33f0'}, {'type': 'function', 'name': 'CertIsRDNAttrsInCertificateName', 'address': '0x7ffb3be43ae0'}, {'type': 'function', 'name': 'CertIsStrongHashToSign', 'address': '0x7ffb3be4cee0'}, {'type': 'function', 'name': 'CertIsValidCRLForCertificate', 'address': '0x7ffb3bdc7b40'}, {'type': 'function', 'name': 'CertIsWeakHash', 'address': '0x7ffb3bdd6c20'}, {'type': 'function', 'name': 'CertNameToStrA', 'address': '0x7ffb3be0db70'}, {'type': 'function', 'name': 'CertNameToStrW', 'address': '0x7ffb3be091b0'}, {'type': 'function', 'name': 'CertOIDToAlgId', 'address': '0x7ffb3bdf9830'}, {'type': 'function', 'name': 'CertOpenServerOcspResponse', 'address': '0x7ffb3be433c0'}, {'type': 'function', 'name': 'CertOpenStore', 'address': '0x7ffb3bde86b0'}, {'type': 'function', 'name': 'CertOpenSystemStoreA', 'address': '0x7ffb3be41160'}, 
{'type': 'function', 'name': 'CertOpenSystemStoreW', 'address': '0x7ffb3be411e0'}, {'type': 'function', 'name': 'CertRDNValueToStrA', 'address': '0x7ffb3be4aec0'}, {'type': 'function', 'name': 'CertRDNValueToStrW', 'address': '0x7ffb3be4af90'}, {'type': 'function', 'name': 'CertRegisterPhysicalStore', 'address': '0x7ffb3be49200'}, {'type': 'function', 'name': 'CertRegisterSystemStore', 'address': '0x7ffb3be494c0'}, {'type': 'function', 'name': 'CertRemoveEnhancedKeyUsageIdentifier', 'address': '0x7ffb3be419d0'}, {'type': 'function', 'name': 'CertRemoveStoreFromCollection', 'address': '0x7ffb3be0a370'}, {'type': 'function', 'name': 'CertResyncCertificateChainEngine', 'address': '0x7ffb3be90880'}, {'type': 'function', 'name': 'CertRetrieveLogoOrBiometricInfo', 'address': '0x7ffb3be4d240'}, {'type': 'function', 'name': 'CertSaveStore', 'address': '0x7ffb3be08a50'}, {'type': 'function', 'name': 'CertSelectCertificateChains', 'address': '0x7ffb3be4a270'}, {'type': 'function', 'name': 'CertSerializeCRLStoreElement', 'address': '0x7ffb3be076b0'}, {'type': 'function', 'name': 'CertSerializeCTLStoreElement', 'address': '0x7ffb3be076b0'}, {'type': 'function', 'name': 'CertSerializeCertificateStoreElement', 'address': '0x7ffb3be076b0'}, {'type': 'function', 'name': 'CertSetCRLContextProperty', 'address': '0x7ffb3be0a6c0'}, {'type': 'function', 'name': 'CertSetCTLContextProperty', 'address': '0x7ffb3be0a6c0'}, {'type': 'function', 'name': 'CertSetCertificateContextPropertiesFromCTLEntry', 'address': '0x7ffb3bdc8080'}, {'type': 'function', 'name': 'CertSetCertificateContextProperty', 'address': '0x7ffb3be0a6c0'}, {'type': 'function', 'name': 'CertSetEnhancedKeyUsage', 'address': '0x7ffb3be41ad0'}, {'type': 'function', 'name': 'CertSetStoreProperty', 'address': '0x7ffb3be40b90'}, {'type': 'function', 'name': 'CertStrToNameA', 'address': '0x7ffb3be4b140'}, {'type': 'function', 'name': 'CertStrToNameW', 'address': '0x7ffb3bdc66c0'}, {'type': 'function', 'name': 
'CertUnregisterPhysicalStore', 'address': '0x7ffb3be495d0'}, {'type': 'function', 'name': 'CertUnregisterSystemStore', 'address': '0x7ffb3be496f0'}, {'type': 'function', 'name': 'CertVerifyCRLRevocation', 'address': '0x7ffb3be43d00'}, {'type': 'function', 'name': 'CertVerifyCRLTimeValidity', 'address': '0x7ffb3be43d90'}, {'type': 'function', 'name': 'CertVerifyCTLUsage', 'address': '0x7ffb3be4da60'}, {'type': 'function', 'name': 'CertVerifyCertificateChainPolicy', 'address': '0x7ffb3bde4aa0'}, {'type': 'function', 'name': 'CertVerifyRevocation', 'address': '0x7ffb3bdf4790'}, {'type': 'function', 'name': 'CertVerifySubjectCertificateContext', 'address': '0x7ffb3be40c10'}, {'type': 'function', 'name': 'CertVerifyTimeValidity', 'address': '0x7ffb3bdf0c40'}, {'type': 'function', 'name': 'CertVerifyValidityNesting', 'address': '0x7ffb3be43e50'}, {'type': 'function', 'name': 'CryptAcquireCertificatePrivateKey', 'address': '0x7ffb3be0e510'}, {'type': 'function', 'name': 'CryptBinaryToStringA', 'address': '0x7ffb3bdd0b80'}, {'type': 'function', 'name': 'CryptBinaryToStringW', 'address': '0x7ffb3bdd0a40'}, {'type': 'function', 'name': 'CryptCloseAsyncHandle', 'address': '0x7ffb3be4ec60'}, {'type': 'function', 'name': 'CryptCreateAsyncHandle', 'address': '0x7ffb3be4ec90'}, {'type': 'function', 'name': 'CryptCreateKeyIdentifierFromCSP', 'address': '0x7ffb3be4c3e0'}, {'type': 'function', 'name': 'CryptDecodeMessage', 'address': '0x7ffb3be50690'}, {'type': 'function', 'name': 'CryptDecodeObject', 'address': '0x7ffb3bde2e90'}, {'type': 'function', 'name': 'CryptDecodeObjectEx', 'address': '0x7ffb3bde2ed0'}, {'type': 'function', 'name': 'CryptDecryptAndVerifyMessageSignature', 'address': '0x7ffb3be50730'}, {'type': 'function', 'name': 'CryptDecryptMessage', 'address': '0x7ffb3be50960'}, {'type': 'function', 'name': 'CryptEncodeObject', 'address': '0x7ffb3be0b490'}, {'type': 'function', 'name': 'CryptEncodeObjectEx', 'address': '0x7ffb3bde4360'}, {'type': 'function', 'name': 
'CryptEncryptMessage', 'address': '0x7ffb3be509d0'}, {'type': 'function', 'name': 'CryptEnumKeyIdentifierProperties', 'address': '0x7ffb3be40c60'}, {'type': 'function', 'name': 'CryptEnumOIDFunction', 'address': '0x7ffb3bdeefd0'}, {'type': 'function', 'name': 'CryptEnumOIDInfo', 'address': '0x7ffb3be5f9e0'}, {'type': 'function', 'name': 'CryptExportPKCS8', 'address': '0x7ffb3be9b7d0'}, {'type': 'function', 'name': 'CryptExportPublicKeyInfo', 'address': '0x7ffb3be4c470'}, {'type': 'function', 'name': 'CryptExportPublicKeyInfoEx', 'address': '0x7ffb3be0e8b0'}, {'type': 'function', 'name': 'CryptExportPublicKeyInfoFromBCryptKeyHandle', 'address': '0x7ffb3be4c4b0'}, {'type': 'function', 'name': 'CryptFindCertificateKeyProvInfo', 'address': '0x7ffb3be43eb0'}, {'type': 'function', 'name': 'CryptFindLocalizedName', 'address': '0x7ffb3be5fae0'}, {'type': 'function', 'name': 'CryptFindOIDInfo', 'address': '0x7ffb3bde2690'}, {'type': 'function', 'name': 'CryptFormatObject', 'address': '0x7ffb3be6b2c0'}, {'type': 'function', 'name': 'CryptFreeOIDFunctionAddress', 'address': '0x7ffb3bdf4ef0'}, {'type': 'function', 'name': 'CryptGetAsyncParam', 'address': '0x7ffb3be4ecc0'}, {'type': 'function', 'name': 'CryptGetDefaultOIDDllList', 'address': '0x7ffb3bdf5270'}, {'type': 'function', 'name': 'CryptGetDefaultOIDFunctionAddress', 'address': '0x7ffb3bdf5320'}, {'type': 'function', 'name': 'CryptGetKeyIdentifierProperty', 'address': '0x7ffb3be40d60'}, {'type': 'function', 'name': 'CryptGetMessageCertificates', 'address': '0x7ffb3be50ab0'}, {'type': 'function', 'name': 'CryptGetMessageSignerCount', 'address': '0x7ffb3be50af0'}, {'type': 'function', 'name': 'CryptGetOIDFunctionAddress', 'address': '0x7ffb3bde4790'}, {'type': 'function', 'name': 'CryptGetOIDFunctionValue', 'address': '0x7ffb3be5dd80'}, {'type': 'function', 'name': 'CryptHashCertificate', 'address': '0x7ffb3bdd1c20'}, {'type': 'function', 'name': 'CryptHashCertificate2', 'address': '0x7ffb3bddb070'}, {'type': 'function', 
'name': 'CryptHashMessage', 'address': '0x7ffb3be50ba0'}, {'type': 'function', 'name': 'CryptHashPublicKeyInfo', 'address': '0x7ffb3bdc3d20'}, {'type': 'function', 'name': 'CryptHashToBeSigned', 'address': '0x7ffb3bde1ff0'}, {'type': 'function', 'name': 'CryptImportPKCS8', 'address': '0x7ffb3be9bbd0'}, {'type': 'function', 'name': 'CryptImportPublicKeyInfo', 'address': '0x7ffb3bdc6470'}, {'type': 'function', 'name': 'CryptImportPublicKeyInfoEx', 'address': '0x7ffb3bdf4d60'}, {'type': 'function', 'name': 'CryptImportPublicKeyInfoEx2', 'address': '0x7ffb3bde3400'}, {'type': 'function', 'name': 'CryptInitOIDFunctionSet', 'address': '0x7ffb3be00d80'}, {'type': 'function', 'name': 'CryptInstallDefaultContext', 'address': '0x7ffb3be712d0'}, {'type': 'function', 'name': 'CryptInstallOIDFunctionAddress', 'address': '0x7ffb3be00d10'}, {'type': 'function', 'name': 'CryptLoadSip', 'address': '0x7ffb3beb00f0'}, {'type': 'function', 'name': 'CryptMemAlloc', 'address': '0x7ffb3be065f0'}, {'type': 'function', 'name': 'CryptMemFree', 'address': '0x7ffb3bde1d10'}, {'type': 'function', 'name': 'CryptMemRealloc', 'address': '0x7ffb3be71760'}, {'type': 'function', 'name': 'CryptMsgCalculateEncodedLength', 'address': '0x7ffb3beb20b0'}, {'type': 'function', 'name': 'CryptMsgClose', 'address': '0x7ffb3bde1980'}, {'type': 'function', 'name': 'CryptMsgControl', 'address': '0x7ffb3be02540'}, {'type': 'function', 'name': 'CryptMsgCountersign', 'address': '0x7ffb3bebce40'}, {'type': 'function', 'name': 'CryptMsgCountersignEncoded', 'address': '0x7ffb3bebcfe0'}, {'type': 'function', 'name': 'CryptMsgDuplicate', 'address': '0x7ffb3be0cb60'}, {'type': 'function', 'name': 'CryptMsgEncodeAndSignCTL', 'address': '0x7ffb3bebd7c0'}, {'type': 'function', 'name': 'CryptMsgGetAndVerifySigner', 'address': '0x7ffb3bdc92b0'}, {'type': 'function', 'name': 'CryptMsgGetParam', 'address': '0x7ffb3bdd5270'}, {'type': 'function', 'name': 'CryptMsgOpenToDecode', 'address': '0x7ffb3bdf5060'}, {'type': 'function', 
'name': 'CryptMsgOpenToEncode', 'address': '0x7ffb3bebd380'}, {'type': 'function', 'name': 'CryptMsgSignCTL', 'address': '0x7ffb3bebd8c0'}, {'type': 'function', 'name': 'CryptMsgUpdate', 'address': '0x7ffb3bde8c10'}, {'type': 'function', 'name': 'CryptMsgVerifyCountersignatureEncoded', 'address': '0x7ffb3bebd490'}, {'type': 'function', 'name': 'CryptMsgVerifyCountersignatureEncodedEx', 'address': '0x7ffb3bebd4f0'}, {'type': 'function', 'name': 'CryptObjectLocatorFree', 'address': '0x7ffb3be97f70'}, {'type': 'function', 'name': 'CryptObjectLocatorGet', 'address': '0x7ffb3be97fc0'}, {'type': 'function', 'name': 'CryptObjectLocatorGetContent', 'address': '0x7ffb3be98000'}, {'type': 'function', 'name': 'CryptObjectLocatorGetUpdated', 'address': '0x7ffb3be980c0'}, {'type': 'function', 'name': 'CryptObjectLocatorInitialize', 'address': '0x7ffb3be98110'}, {'type': 'function', 'name': 'CryptObjectLocatorIsChanged', 'address': '0x7ffb3be98490'}, {'type': 'function', 'name': 'CryptObjectLocatorRelease', 'address': '0x7ffb3be984c0'}, {'type': 'function', 'name': 'CryptProtectData', 'address': '0x7ffb3be0c5f0'}, {'type': 'function', 'name': 'CryptQueryObject', 'address': '0x7ffb3bdc1e00'}, {'type': 'function', 'name': 'CryptRegisterDefaultOIDFunction', 'address': '0x7ffb3be5de80'}, {'type': 'function', 'name': 'CryptRegisterOIDFunction', 'address': '0x7ffb3be5e1a0'}, {'type': 'function', 'name': 'CryptRegisterOIDInfo', 'address': '0x7ffb3be5fb90'}, {'type': 'function', 'name': 'CryptRetrieveTimeStamp', 'address': '0x7ffb3beb07e0'}, {'type': 'function', 'name': 'CryptSIPAddProvider', 'address': '0x7ffb3beb01a0'}, {'type': 'function', 'name': 'CryptSIPCreateIndirectData', 'address': '0x7ffb3bdf5730'}, {'type': 'function', 'name': 'CryptSIPGetCaps', 'address': '0x7ffb3bdf6440'}, {'type': 'function', 'name': 'CryptSIPGetSealedDigest', 'address': '0x7ffb3beb0530'}, {'type': 'function', 'name': 'CryptSIPGetSignedDataMsg', 'address': '0x7ffb3be0c180'}, {'type': 'function', 'name': 
'CryptSIPLoad', 'address': '0x7ffb3bdf46b0'}, {'type': 'function', 'name': 'CryptSIPPutSignedDataMsg', 'address': '0x7ffb3beb0620'}, {'type': 'function', 'name': 'CryptSIPRemoveProvider', 'address': '0x7ffb3beb0380'}, {'type': 'function', 'name': 'CryptSIPRemoveSignedDataMsg', 'address': '0x7ffb3beb0710'}, {'type': 'function', 'name': 'CryptSIPRetrieveSubjectGuid', 'address': '0x7ffb3be06fd0'}, {'type': 'function', 'name': 'CryptSIPRetrieveSubjectGuidForCatalogFile', 'address': '0x7ffb3be07340'}, {'type': 'function', 'name': 'CryptSIPVerifyIndirectData', 'address': '0x7ffb3be0e140'}, {'type': 'function', 'name': 'CryptSetAsyncParam', 'address': '0x7ffb3be4ec60'}, {'type': 'function', 'name': 'CryptSetKeyIdentifierProperty', 'address': '0x7ffb3be40e80'}, {'type': 'function', 'name': 'CryptSetOIDFunctionValue', 'address': '0x7ffb3be5e2a0'}, {'type': 'function', 'name': 'CryptSignAndEncodeCertificate', 'address': '0x7ffb3bdd1970'}, {'type': 'function', 'name': 'CryptSignAndEncryptMessage', 'address': '0x7ffb3be50e10'}, {'type': 'function', 'name': 'CryptSignCertificate', 'address': '0x7ffb3bdd1b10'}, {'type': 'function', 'name': 'CryptSignMessage', 'address': '0x7ffb3be50f70'}, {'type': 'function', 'name': 'CryptSignMessageWithKey', 'address': '0x7ffb3be51080'}, {'type': 'function', 'name': 'CryptStringToBinaryA', 'address': '0x7ffb3bdfb610'}, {'type': 'function', 'name': 'CryptStringToBinaryW', 'address': '0x7ffb3bdfb470'}, {'type': 'function', 'name': 'CryptUninstallDefaultContext', 'address': '0x7ffb3be714a0'}, {'type': 'function', 'name': 'CryptUnprotectData', 'address': '0x7ffb3be05440'}, {'type': 'function', 'name': 'CryptUnregisterDefaultOIDFunction', 'address': '0x7ffb3be5e3b0'}, {'type': 'function', 'name': 'CryptUnregisterOIDFunction', 'address': '0x7ffb3be5e670'}, {'type': 'function', 'name': 'CryptUnregisterOIDInfo', 'address': '0x7ffb3be5fd90'}, {'type': 'function', 'name': 'CryptVerifyCertificateSignature', 'address': '0x7ffb3be43fd0'}, {'type': 
'function', 'name': 'CryptVerifyCertificateSignatureEx', 'address': '0x7ffb3bdd8230'}, {'type': 'function', 'name': 'CryptVerifyDetachedMessageHash', 'address': '0x7ffb3be51310'}, {'type': 'function', 'name': 'CryptVerifyDetachedMessageSignature', 'address': '0x7ffb3be51370'}, {'type': 'function', 'name': 'CryptVerifyMessageHash', 'address': '0x7ffb3be513f0'}, {'type': 'function', 'name': 'CryptVerifyMessageSignature', 'address': '0x7ffb3be51440'}, {'type': 'function', 'name': 'CryptVerifyMessageSignatureWithKey', 'address': '0x7ffb3be514c0'}, {'type': 'function', 'name': 'CryptVerifyTimeStampSignature', 'address': '0x7ffb3bdc4750'}, {'type': 'function', 'name': 'I_CertChainEngineIsDisallowedCertificate', 'address': '0x7ffb3bdf9b20'}, {'type': 'function', 'name': 'I_CertDiagControl', 'address': '0x7ffb3bdcbec0'}, {'type': 'function', 'name': 'I_CertFinishSslHandshake', 'address': '0x7ffb3bdf7a70'}, {'type': 'function', 'name': 'I_CertProcessSslHandshake', 'address': '0x7ffb3bdf7c20'}, {'type': 'function', 'name': 'I_CertProtectFunction', 'address': '0x7ffb3bdcdbc0'}, {'type': 'function', 'name': 'I_CertSrvProtectFunction', 'address': '0x7ffb3bdfae40'}, {'type': 'function', 'name': 'I_CertSyncStore', 'address': '0x7ffb3be41040'}, {'type': 'function', 'name': 'I_CertUpdateStore', 'address': '0x7ffb3bdc63a0'}, {'type': 'function', 'name': 'I_CertWnfEnableFlushCache', 'address': '0x7ffb3be10260'}, {'type': 'function', 'name': 'I_CryptAddRefLruEntry', 'address': '0x7ffb3be0aa90'}, {'type': 'function', 'name': 'I_CryptAddSmartCardCertToStore', 'address': '0x7ffb3be75260'}, {'type': 'function', 'name': 'I_CryptAllocTls', 'address': '0x7ffb3bdf9fe0'}, {'type': 'function', 'name': 'I_CryptAllocTlsEx', 'address': '0x7ffb3bdfa6e0'}, {'type': 'function', 'name': 'I_CryptCreateLruCache', 'address': '0x7ffb3bdd8030'}, {'type': 'function', 'name': 'I_CryptCreateLruEntry', 'address': '0x7ffb3bdd9920'}, {'type': 'function', 'name': 'I_CryptDetachTls', 'address': '0x7ffb3be06870'}, 
{'type': 'function', 'name': 'I_CryptDisableLruOfEntries', 'address': '0x7ffb3be91ae0'}, {'type': 'function', 'name': 'I_CryptEnableLruOfEntries', 'address': '0x7ffb3be91b30'}, {'type': 'function', 'name': 'I_CryptEnumMatchingLruEntries', 'address': '0x7ffb3bdc3270'}, {'type': 'function', 'name': 'I_CryptFindLruEntry', 'address': '0x7ffb3bdda0a0'}, {'type': 'function', 'name': 'I_CryptFindLruEntryData', 'address': '0x7ffb3be91ba0'}, {'type': 'function', 'name': 'I_CryptFindSmartCardCertInStore', 'address': '0x7ffb3be75380'}, {'type': 'function', 'name': 'I_CryptFlushLruCache', 'address': '0x7ffb3bdd2270'}, {'type': 'function', 'name': 'I_CryptFreeLruCache', 'address': '0x7ffb3be0cd60'}, {'type': 'function', 'name': 'I_CryptFreeTls', 'address': '0x7ffb3bdfa7b0'}, {'type': 'function', 'name': 'I_CryptGetAsn1Decoder', 'address': '0x7ffb3bde7450'}, {'type': 'function', 'name': 'I_CryptGetAsn1Encoder', 'address': '0x7ffb3bdec490'}, {'type': 'function', 'name': 'I_CryptGetDefaultCryptProv', 'address': '0x7ffb3bdf5180'}, {'type': 'function', 'name': 'I_CryptGetDefaultCryptProvForEncrypt', 'address': '0x7ffb3be71620'}, {'type': 'function', 'name': 'I_CryptGetFileVersion', 'address': '0x7ffb3be2f550'}, {'type': 'function', 'name': 'I_CryptGetLruEntryData', 'address': '0x7ffb3bdd7150'}, {'type': 'function', 'name': 'I_CryptGetLruEntryIdentifier', 'address': '0x7ffb3be91bd0'}, {'type': 'function', 'name': 'I_CryptGetOssGlobal', 'address': '0x7ffb3be755f0'}, {'type': 'function', 'name': 'I_CryptGetTls', 'address': '0x7ffb3bde8660'}, {'type': 'function', 'name': 'I_CryptInsertLruEntry', 'address': '0x7ffb3bdd97e0'}, {'type': 'function', 'name': 'I_CryptInstallAsn1Module', 'address': '0x7ffb3bdf9ff0'}, {'type': 'function', 'name': 'I_CryptInstallOssGlobal', 'address': '0x7ffb3be755f0'}, {'type': 'function', 'name': 'I_CryptReadTrustedPublisherDWORDValueFromRegistry', 'address': '0x7ffb3bdefe00'}, {'type': 'function', 'name': 'I_CryptRegisterSmartCardStore', 'address': 
'0x7ffb3be4ec60'}, {'type': 'function', 'name': 'I_CryptReleaseLruEntry', 'address': '0x7ffb3bdd9760'}, {'type': 'function', 'name': 'I_CryptRemoveLruEntry', 'address': '0x7ffb3bdc6320'}, {'type': 'function', 'name': 'I_CryptSetTls', 'address': '0x7ffb3bdeca00'}, {'type': 'function', 'name': 'I_CryptTouchLruEntry', 'address': '0x7ffb3bdc6050'}, {'type': 'function', 'name': 'I_CryptUninstallAsn1Module', 'address': '0x7ffb3bdfaf80'}, {'type': 'function', 'name': 'I_CryptUninstallOssGlobal', 'address': '0x7ffb3be755f0'}, {'type': 'function', 'name': 'I_CryptUnregisterSmartCardStore', 'address': '0x7ffb3be4ec60'}, {'type': 'function', 'name': 'I_CryptWalkAllLruCacheEntries', 'address': '0x7ffb3be06c90'}, {'type': 'function', 'name': 'I_PFXDecrypt', 'address': '0x7ffb3be9d940'}, {'type': 'function', 'name': 'I_PFXHMAC', 'address': '0x7ffb3be9dd60'}, {'type': 'function', 'name': 'I_PFXImportCertStoreEx', 'address': '0x7ffb3be99860'}, {'type': 'function', 'name': 'PFXExportCertStore', 'address': '0x7ffb3be9a320'}, {'type': 'function', 'name': 'PFXExportCertStore2', 'address': '0x7ffb3be9a480'}, {'type': 'function', 'name': 'PFXExportCertStoreEx', 'address': '0x7ffb3be9a4a0'}, {'type': 'function', 'name': 'PFXImportCertStore', 'address': '0x7ffb3be9a730'}, {'type': 'function', 'name': 'PFXIsPFXBlob', 'address': '0x7ffb3be9a760'}, {'type': 'function', 'name': 'PFXVerifyPassword', 'address': '0x7ffb3be9a7f0'}]
| 12,668.5
| 25,324
| 0.700004
| 1,804
| 25,337
| 9.803769
| 0.316519
| 0.198123
| 0.264164
| 0.048061
| 0.065136
| 0.010743
| 0
| 0
| 0
| 0
| 0
| 0.080997
| 0.069306
| 25,337
| 2
| 25,324
| 12,668.5
| 0.669013
| 0
| 0
| 0
| 0
| 0
| 0.699424
| 0.202542
| 0
| 0
| 0.161339
| 0
| 0
| 1
| 0
| false
| 0.5
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
|
0
| 7
|
f4a1080d5af6bc7222a6f5cdd899b208f6afdf45
| 3,434
|
py
|
Python
|
python/tests/test_cg.py
|
Dareen/pubnub-python
|
713e98b53f6623a8abca2cee8a47fd92ceb7a75b
|
[
"MIT"
] | null | null | null |
python/tests/test_cg.py
|
Dareen/pubnub-python
|
713e98b53f6623a8abca2cee8a47fd92ceb7a75b
|
[
"MIT"
] | null | null | null |
python/tests/test_cg.py
|
Dareen/pubnub-python
|
713e98b53f6623a8abca2cee8a47fd92ceb7a75b
|
[
"MIT"
] | 1
|
2019-09-10T04:07:35.000Z
|
2019-09-10T04:07:35.000Z
|
from pubnub import Pubnub
import time
import random
# Module-level client shared by every test below; uses PubNub's public
# "demo" publish/subscribe keys, so these tests hit the live demo keyspace.
pubnub = Pubnub("demo","demo")
# NOTE(review): presumably toggles the SDK's "u" (usage/analytics) request
# flag -- confirm against the pubnub package, it is not visible from here.
pubnub.set_u(True)
def rand_str(s):
    """Return *s* (stringified) plus a '-' and a random numeric suffix.

    Used to generate collision-free channel/group/namespace names so
    concurrent test runs against the shared demo keyspace don't clash.
    """
    suffix = random.randint(1, 100000000000)
    return '-'.join([str(s), str(suffix)])
def _assert_ok(resp):
    # Shared success check for PubNub channel-group API responses.
    assert resp['status'] == 200
    assert resp['message'] == 'OK'
    assert resp['error'] == False
def test_1():
    """End-to-end CRUD test for namespaced channel groups.

    Adds two channels to two groups in a fresh namespace, then verifies
    listing, per-channel removal, group removal and finally namespace
    removal, checking the server's view after each mutation.
    """
    channel = rand_str('channel')
    channel2 = rand_str('channel')
    channel_group = rand_str('group')
    channel_group2 = rand_str('group')
    namespace = rand_str('ns')
    # Populate both groups with both channels.
    _assert_ok(pubnub.channel_group_add_channel(channel_group=namespace + ':' + channel_group, channel=channel))
    _assert_ok(pubnub.channel_group_add_channel(channel_group=namespace + ':' + channel_group, channel=channel2))
    _assert_ok(pubnub.channel_group_add_channel(channel_group=namespace + ':' + channel_group2, channel=channel))
    _assert_ok(pubnub.channel_group_add_channel(channel_group=namespace + ':' + channel_group2, channel=channel2))
    resp = pubnub.channel_group_list_channels(channel_group=namespace + ':' + channel_group)
    assert channel in resp['payload']['channels']
    assert channel2 in resp['payload']['channels']
    assert len(resp['payload']['channels']) == 2
    # Remove one channel from the first group and verify only one remains.
    resp = pubnub.channel_group_remove_channel(channel_group=namespace + ':' + channel_group, channel=channel2)
    print(resp)
    _assert_ok(resp)
    resp = pubnub.channel_group_list_channels(channel_group=namespace + ':' + channel_group)
    print(resp)
    assert channel in resp['payload']['channels']
    assert len(resp['payload']['channels']) == 1
    # The second group must be untouched by the removal above.
    resp = pubnub.channel_group_list_channels(channel_group=namespace + ':' + channel_group2)
    assert channel in resp['payload']['channels']
    assert channel2 in resp['payload']['channels']
    assert len(resp['payload']['channels']) == 2
    resp = pubnub.channel_group_remove_channel(channel_group=namespace + ':' + channel_group2, channel=channel2)
    print(resp)
    _assert_ok(resp)
    resp = pubnub.channel_group_list_channels(channel_group=namespace + ':' + channel_group2)
    print(resp)
    assert channel in resp['payload']['channels']
    assert len(resp['payload']['channels']) == 1
    # Both groups are still listed under the namespace.
    resp = pubnub.channel_group_list_groups(namespace=namespace)
    assert channel_group in resp['payload']['groups']
    assert channel_group2 in resp['payload']['groups']
    assert len(resp['payload']['groups']) == 2
    # Drop the second group entirely.
    resp = pubnub.channel_group_remove_group(channel_group=namespace + ':' + channel_group2)
    print(resp)
    _assert_ok(resp)
    resp = pubnub.channel_group_list_groups(namespace=namespace)
    assert channel_group in resp['payload']['groups']
    assert len(resp['payload']['groups']) == 1
    # Finally remove the whole namespace and verify it is gone.
    resp = pubnub.channel_group_list_namespaces()
    assert namespace in resp['payload']['namespaces']
    resp = pubnub.channel_group_remove_namespace(namespace=namespace)
    print(resp)
    _assert_ok(resp)
    resp = pubnub.channel_group_list_namespaces()
    assert namespace not in resp['payload']['namespaces']
| 31.796296
| 109
| 0.717822
| 436
| 3,434
| 5.463303
| 0.100917
| 0.176322
| 0.11419
| 0.147775
| 0.857263
| 0.845508
| 0.832914
| 0.832914
| 0.782116
| 0.754828
| 0
| 0.019765
| 0.130751
| 3,434
| 107
| 110
| 32.093458
| 0.778224
| 0
| 0
| 0.657895
| 0
| 0
| 0.132653
| 0
| 0
| 0
| 0
| 0
| 0.539474
| 0
| null | null | 0
| 0.039474
| null | null | 0.078947
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f4a68715c22c7dd76533253c3f022db9360c134f
| 107
|
py
|
Python
|
car_core/scripts/car_core/common/__init__.py
|
vstucar/vstucar
|
46ba2aebed1d9bd76a66db06af2bd8a4384b403d
|
[
"MIT"
] | null | null | null |
car_core/scripts/car_core/common/__init__.py
|
vstucar/vstucar
|
46ba2aebed1d9bd76a66db06af2bd8a4384b403d
|
[
"MIT"
] | null | null | null |
car_core/scripts/car_core/common/__init__.py
|
vstucar/vstucar
|
46ba2aebed1d9bd76a66db06af2bd8a4384b403d
|
[
"MIT"
] | null | null | null |
from . import geom_helpers, msgs_helpers, rviz_helpers

# __all__ must contain *strings* naming the public attributes. The original
# listed the module objects themselves, which makes `from package import *`
# raise "TypeError: Item in __all__ must be str" on Python 3 and silently
# misbehave on Python 2.
__all__ = ["geom_helpers", "msgs_helpers", "rviz_helpers"]
| 53.5
| 54
| 0.831776
| 15
| 107
| 5.266667
| 0.466667
| 0.278481
| 0.379747
| 0.556962
| 0.835443
| 0.835443
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093458
| 107
| 2
| 55
| 53.5
| 0.814433
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
f4c73420df0ffa6198882fd80815c6501c2d5a11
| 104
|
py
|
Python
|
serializers/templates/__init__.py
|
d0cd/vectre-x86-disasm
|
098a2a67c8d588d9960150a6eda538f42694548b
|
[
"MIT"
] | null | null | null |
serializers/templates/__init__.py
|
d0cd/vectre-x86-disasm
|
098a2a67c8d588d9960150a6eda538f42694548b
|
[
"MIT"
] | null | null | null |
serializers/templates/__init__.py
|
d0cd/vectre-x86-disasm
|
098a2a67c8d588d9960150a6eda538f42694548b
|
[
"MIT"
] | null | null | null |
from .prog_def_template import *
from .platform_def_template import *
from .inst_def_template import *
| 20.8
| 36
| 0.817308
| 15
| 104
| 5.266667
| 0.466667
| 0.417722
| 0.64557
| 0.531646
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 104
| 4
| 37
| 26
| 0.868132
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
f4f1f45161e522773f69331441d95190bb58bd7c
| 2,893
|
py
|
Python
|
pyaz/afd/secret/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/afd/secret/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/afd/secret/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | 1
|
2022-02-03T09:12:01.000Z
|
2022-02-03T09:12:01.000Z
|
'''
Manage secrets within the specified profile.
'''
from ... pyaz_utils import _call_az
def show(profile_name, resource_group, secret_name):
    '''
    Show a secret within the specified profile.

    Required Parameters:
    - profile_name -- Name of the CDN profile which is unique within the resource group.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - secret_name -- Name of the secret.
    '''
    # Explicit parameter mapping; identical to what locals() would capture here.
    params = {
        "profile_name": profile_name,
        "resource_group": resource_group,
        "secret_name": secret_name,
    }
    return _call_az("az afd secret show", params)
def delete(profile_name, resource_group, secret_name, yes=None):
    '''
    Delete a secret within the specified profile.

    Required Parameters:
    - profile_name -- Name of the CDN profile which is unique within the resource group.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - secret_name -- Name of the secret.

    Optional Parameters:
    - yes -- Do not prompt for confirmation.
    '''
    # Explicit parameter mapping; identical to what locals() would capture here.
    params = {
        "profile_name": profile_name,
        "resource_group": resource_group,
        "secret_name": secret_name,
        "yes": yes,
    }
    return _call_az("az afd secret delete", params)
def list(profile_name, resource_group):
    '''
    List secrets within the specified profile.

    Required Parameters:
    - profile_name -- Name of the CDN profile which is unique within the resource group.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`

    NOTE: the name shadows the builtin `list`; kept for CLI parity with
    `az afd secret list` -- callers depend on it.
    '''
    # Explicit parameter mapping; identical to what locals() would capture here.
    params = {
        "profile_name": profile_name,
        "resource_group": resource_group,
    }
    return _call_az("az afd secret list", params)
def create(profile_name, resource_group, secret_name, secret_source, secret_version=None, use_latest_version=None):
    '''
    Creates a new secret within the specified profile.

    Required Parameters:
    - profile_name -- Name of the CDN profile which is unique within the resource group.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - secret_name -- Name of the secret.
    - secret_source -- ID of the Azure key vault certificate.

    Optional Parameters:
    - secret_version -- Version of the certificate to be used.
    - use_latest_version -- Whether to use the latest version for the certificate.
    '''
    # Explicit parameter mapping; identical to what locals() would capture here.
    params = {
        "profile_name": profile_name,
        "resource_group": resource_group,
        "secret_name": secret_name,
        "secret_source": secret_source,
        "secret_version": secret_version,
        "use_latest_version": use_latest_version,
    }
    return _call_az("az afd secret create", params)
def update(profile_name, resource_group, secret_name, secret_source=None, secret_version=None, use_latest_version=None):
    '''
    Update an existing secret within the specified profile.

    Required Parameters:
    - profile_name -- Name of the CDN profile which is unique within the resource group.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - secret_name -- Name of the secret.

    Optional Parameters:
    - secret_source -- ID of the Azure key vault certificate.
    - secret_version -- Version of the certificate to be used.
    - use_latest_version -- Whether to use the latest version for the certificate.
    '''
    # Explicit parameter mapping; identical to what locals() would capture here.
    params = {
        "profile_name": profile_name,
        "resource_group": resource_group,
        "secret_name": secret_name,
        "secret_source": secret_source,
        "secret_version": secret_version,
        "use_latest_version": use_latest_version,
    }
    return _call_az("az afd secret update", params)
| 37.571429
| 128
| 0.711718
| 393
| 2,893
| 5.094148
| 0.16285
| 0.12987
| 0.044955
| 0.058442
| 0.865634
| 0.865634
| 0.797203
| 0.76024
| 0.714286
| 0.671329
| 0
| 0
| 0.204978
| 2,893
| 76
| 129
| 38.065789
| 0.870435
| 0.675769
| 0
| 0
| 0
| 0
| 0.131148
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.454545
| false
| 0
| 0.090909
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
7625226fe17a880b6f098a1fe26757979e4659cb
| 1,975
|
py
|
Python
|
tests/test_pykdebugparser.py
|
matan1008/pykdebugparser
|
e219c2434d012b935ee25f75571647aaed1a4dda
|
[
"MIT"
] | 10
|
2021-06-17T14:07:38.000Z
|
2021-12-09T18:33:48.000Z
|
tests/test_pykdebugparser.py
|
matan1008/pykdebugparser
|
e219c2434d012b935ee25f75571647aaed1a4dda
|
[
"MIT"
] | null | null | null |
tests/test_pykdebugparser.py
|
matan1008/pykdebugparser
|
e219c2434d012b935ee25f75571647aaed1a4dda
|
[
"MIT"
] | 3
|
2021-06-22T13:01:59.000Z
|
2021-06-27T03:35:04.000Z
|
from io import BytesIO
from pykdebugparser.kd_buf_parser import RAW_VERSION2_BYTES
from pykdebugparser.kevent import Kevent
from pykdebugparser.pykdebugparser import PyKdebugParser
def test_kevents():
    """Parse a minimal RAW_VERSION2 capture containing a single kevent."""
    # File magic followed by 0x11c zero bytes -- presumably padding up to the
    # fixed header size expected by kd_buf_parser; TODO confirm against
    # RAW_VERSION2 header layout.
    events_buf = RAW_VERSION2_BYTES + b'\x00' * 0x11c
    # One 64-byte raw kevent record (timestamp, 32 bytes of argument data,
    # tid, debugid and trailing fields), captured verbatim from a real trace.
    events_buf += (b'\xa50\x147_\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
                   b'\x00\x00\x00\x00\x00\x00\x00\x00\xc6\x01\x00\x00\x00\x00\x00\x00y\xd8\t\x00\x00\x00\x00'
                   b'\x00*\x03\x0c\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')
    parser = PyKdebugParser()
    events = list(parser.kevents(BytesIO(events_buf)))
    # The parser must yield exactly one fully decoded Kevent with the fields
    # below matching the raw record's little-endian values.
    assert events == [
        Kevent(timestamp=7006015729829,
               data=(b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
                     b'\x00\x00\x00\x00\xc6\x01\x00\x00\x00\x00\x00\x00'),
               values=(0, 0, 0, 454), tid=645241, debugid=67896106, eventid=67896104, func_qualifier=2)
    ]
def test_kevents_filter_tid():
    """Verify filter_tid keeps matching events and drops the rest."""
    # Same minimal RAW_VERSION2 buffer as test_kevents: header magic, zero
    # padding, then one 64-byte raw kevent record for tid 645241.
    events_buf = RAW_VERSION2_BYTES + b'\x00' * 0x11c
    events_buf += (b'\xa50\x147_\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
                   b'\x00\x00\x00\x00\x00\x00\x00\x00\xc6\x01\x00\x00\x00\x00\x00\x00y\xd8\t\x00\x00\x00\x00'
                   b'\x00*\x03\x0c\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')
    parser = PyKdebugParser()
    # Filtering on the event's own tid must pass the event through unchanged.
    parser.filter_tid = 645241
    events = list(parser.kevents(BytesIO(events_buf)))
    assert events == [
        Kevent(timestamp=7006015729829,
               data=(b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
                     b'\x00\x00\x00\x00\xc6\x01\x00\x00\x00\x00\x00\x00'),
               values=(0, 0, 0, 454), tid=645241, debugid=67896106, eventid=67896104, func_qualifier=2)
    ]
    # A non-matching tid must filter the event out entirely.
    parser.filter_tid = 3
    events = list(parser.kevents(BytesIO(events_buf)))
    assert events == []
| 49.375
| 109
| 0.652152
| 318
| 1,975
| 3.971698
| 0.154088
| 0.655582
| 0.869359
| 1.007126
| 0.81631
| 0.81631
| 0.81631
| 0.81631
| 0.81631
| 0.77593
| 0
| 0.284295
| 0.184304
| 1,975
| 39
| 110
| 50.641026
| 0.49969
| 0
| 0
| 0.676471
| 0
| 0.294118
| 0.375696
| 0.371646
| 0
| 0
| 0.005063
| 0
| 0.088235
| 1
| 0.058824
| false
| 0
| 0.117647
| 0
| 0.176471
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.