hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
636bf41579234c486af942ade34b8e5f46205e18
117
py
Python
battlescribe_parser/roster/element.py
JKolios/battlewhoosh
9a833e40dd3f9291d89968c279894fd9be3f37cf
[ "MIT" ]
1
2021-10-10T07:13:39.000Z
2021-10-10T07:13:39.000Z
battlescribe_parser/roster/element.py
JKolios/battlewhoosh
9a833e40dd3f9291d89968c279894fd9be3f37cf
[ "MIT" ]
null
null
null
battlescribe_parser/roster/element.py
JKolios/battlewhoosh
9a833e40dd3f9291d89968c279894fd9be3f37cf
[ "MIT" ]
null
null
null
import battlescribe_parser.bsdata.element class RosterElement(battlescribe_parser.bsdata.element.Element): pass
23.4
64
0.846154
13
117
7.461538
0.615385
0.371134
0.494845
0.639175
0
0
0
0
0
0
0
0
0.08547
117
4
65
29.25
0.906542
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
9
636feeedecd201d39b14bc6b5709501941479ffd
2,945
py
Python
views.py
kelvin53/News-Highlight
2a2b30ec5aac9180900d38819cf601c57c204005
[ "MIT" ]
null
null
null
views.py
kelvin53/News-Highlight
2a2b30ec5aac9180900d38819cf601c57c204005
[ "MIT" ]
null
null
null
views.py
kelvin53/News-Highlight
2a2b30ec5aac9180900d38819cf601c57c204005
[ "MIT" ]
null
null
null
from flask import Flask, render_template from newsapi import NewsApiClient app = Flask(__name__) @app.route('/') def Index(): newsapi = NewsApiClient(api_key="bbb081415b6b417eb5d3fbfe57199cf8") topheadlines = newsapi.get_top_headlines(sources="the-wall-street-journal") articles = topheadlines['articles'] desc = [] news = [] img = [] url = [] publAt = [] for i in range(len(articles)): myarticles = articles[i] news.append(myarticles['title']) desc.append(myarticles['description']) img.append(myarticles['urlToImage']) url.append(myarticles['url']) publAt.append(myarticles['publishedAt']) mylist = zip(news, desc, img,url,publAt) return render_template('index.html', context = mylist) @app.route('/reuters') def reuters(): newsapi = NewsApiClient(api_key="bbb081415b6b417eb5d3fbfe57199cf8") topheadlines = newsapi.get_top_headlines(sources="reuters") articles = topheadlines['articles'] desc = [] news = [] img = [] url = [] publAt = [] for i in range(len(articles)): myarticles = articles[i] news.append(myarticles['title']) desc.append(myarticles['description']) img.append(myarticles['urlToImage']) url.append(myarticles['url']) publAt.append(myarticles['publishedAt']) mylist = zip(news, desc, img,url,publAt) return render_template('reuters.html', context = mylist) @app.route('/techcrunch') def techcrunch(): newsapi = NewsApiClient(api_key="bbb081415b6b417eb5d3fbfe57199cf8") topheadlines = newsapi.get_top_headlines(sources="techcrunch") articles = topheadlines['articles'] desc = [] news = [] img = [] url = [] publAt = [] for i in range(len(articles)): myarticles = articles[i] news.append(myarticles['title']) desc.append(myarticles['description']) img.append(myarticles['urlToImage']) url.append(myarticles['url']) publAt.append(myarticles['publishedAt']) mylist = zip(news, desc, img,url,publAt) return render_template('techcrunch.html', context = mylist) @app.route('/insider') def insider(): newsapi = NewsApiClient(api_key="bbb081415b6b417eb5d3fbfe57199cf8") topheadlines = 
newsapi.get_top_headlines(sources="business-insider") articles = topheadlines['articles'] desc = [] news = [] img = [] url = [] publAt = [] for i in range(len(articles)): myarticles = articles[i] news.append(myarticles['title']) desc.append(myarticles['description']) img.append(myarticles['urlToImage']) url.append(myarticles['url']) publAt.append(myarticles['publishedAt']) mylist = zip(news, desc, img,url,publAt) return render_template('insider.html', context = mylist) if __name__ == "__main__": app.run(debug=True) app.run
25.833333
79
0.641766
297
2,945
6.265993
0.185185
0.171951
0.051585
0.055884
0.850618
0.810317
0.810317
0.810317
0.810317
0.810317
0
0.031196
0.216299
2,945
114
80
25.833333
0.77513
0
0
0.731707
0
0
0.156483
0.051256
0
0
0
0
0
1
0.04878
false
0
0.02439
0
0.121951
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
63b23d8a11c3295e47c1e3738c0d8909fa857169
1,934
py
Python
test.py
ignis-sec/allogate
98c9f15a18c112693a5ddb28301aaca72cce04d3
[ "MIT" ]
4
2021-06-14T00:30:34.000Z
2021-08-05T03:04:35.000Z
test.py
FlameOfIgnis/allogate
98c9f15a18c112693a5ddb28301aaca72cce04d3
[ "MIT" ]
null
null
null
test.py
FlameOfIgnis/allogate
98c9f15a18c112693a5ddb28301aaca72cce04d3
[ "MIT" ]
1
2021-06-14T00:30:35.000Z
2021-06-14T00:30:35.000Z
import allogate as logging logging.VERBOSITY=1 print(f"Verbosity = {logging.VERBOSITY}") logging.pprint(f"Hello, this is a failure", 0) logging.pprint(f"Hello, this is a success", 1) logging.pprint(f"Hello, this is a warning", 2) logging.pprint(f"Hello, this is an info" , 3) logging.pprint(f"Hello, this is verbose" , 4) logging.pprint(f"Hello, this is very verbose" , 12) logging.VERBOSITY=3 print(f"Verbosity = {logging.VERBOSITY}") logging.pprint(f"Hello, this is a failure", 0) logging.pprint(f"Hello, this is a success", 1) logging.pprint(f"Hello, this is a warning", 2) logging.pprint(f"Hello, this is an info" , 3) logging.pprint(f"Hello, this is verbose" , 4) logging.pprint(f"Hello, this is very verbose" , 12) logging.VERBOSITY=5 print(f"Verbosity = {logging.VERBOSITY}") logging.pprint(f"Hello, this is a failure", 0) logging.pprint(f"Hello, this is a success", 1) logging.pprint(f"Hello, this is a warning", 2) logging.pprint(f"Hello, this is an info" , 3) logging.pprint(f"Hello, this is verbose" , 4) logging.pprint(f"Hello, this is very verbose" , 12) logging.VERBOSITY=15 print(f"Verbosity = {logging.VERBOSITY}") logging.pprint(f"Hello, this is a failure", 0) logging.pprint(f"Hello, this is a success", 1) logging.pprint(f"Hello, this is a warning", 2) logging.pprint(f"Hello, this is an info" , 3) logging.pprint(f"Hello, this is verbose" , 4) logging.pprint(f"Hello, this is very verbose" , 12) logging.pprint(f"Clamp me AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" , 12) def test_function(): logging.pprint(f"Hello, this is a failure", 0) logging.pprint(f"Hello, this is a success", 1) logging.pprint(f"Hello, this is a warning", 2) logging.pprint(f"Hello, this is an info" , 3) logging.pprint(f"Hello, this is verbose" , 4) logging.pprint(f"Hello, this is very verbose" , 12) test_function()
36.490566
146
0.726474
313
1,934
4.482428
0.105431
0.287242
0.309337
0.406272
0.858161
0.858161
0.858161
0.858161
0.858161
0.858161
0
0.02521
0.138573
1,934
53
147
36.490566
0.816927
0
0
0.809524
0
0
0.496124
0.057881
0
0
0
0
0
1
0.02381
true
0
0.02381
0
0.047619
0.833333
0
0
0
null
1
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
12
8925388c73c6f47df09ae3bb50c4734cfb467b06
115
py
Python
shs/gardener/__init__.py
SeohaSong/tools
77d087a5f8c864260225ef7701d78df1c82c6582
[ "MIT" ]
null
null
null
shs/gardener/__init__.py
SeohaSong/tools
77d087a5f8c864260225ef7701d78df1c82c6582
[ "MIT" ]
null
null
null
shs/gardener/__init__.py
SeohaSong/tools
77d087a5f8c864260225ef7701d78df1c82c6582
[ "MIT" ]
null
null
null
from .init import get_global_filepaths from .init import get_global_dir_paths from .init import clear_current_dir
23
38
0.86087
19
115
4.842105
0.526316
0.26087
0.456522
0.369565
0.5
0
0
0
0
0
0
0
0.113043
115
4
39
28.75
0.901961
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
893a97fb2a90a0bf0693555f77d063d7b31f4e05
928
py
Python
test/integration/test_graylogapi.py
zmallen/pygraylog
cda2c6b583e8c7de47e98458b3faeae7d05a94d3
[ "Apache-2.0" ]
14
2016-08-29T16:31:14.000Z
2021-11-30T10:39:29.000Z
test/integration/test_graylogapi.py
zmallen/pygraylog
cda2c6b583e8c7de47e98458b3faeae7d05a94d3
[ "Apache-2.0" ]
9
2016-08-28T15:23:47.000Z
2018-02-07T20:11:18.000Z
test/integration/test_graylogapi.py
zmallen/pygraylog
cda2c6b583e8c7de47e98458b3faeae7d05a94d3
[ "Apache-2.0" ]
16
2016-10-04T17:37:42.000Z
2021-07-08T15:43:50.000Z
import pytest from pygraylog.pygraylog import graylogapi def test_get(): api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', username = 'Zack', password = 'Zack') res = api._get() expected = "{\"one\": \"two\"}\n" assert res == expected def test_post(): api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', username = 'Zack', password = 'Zack') with pytest.raises(NotImplementedError): api._post() def test_put(): api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', username = 'Zack', password = 'Zack') with pytest.raises(NotImplementedError): api._put() def test_delete(): api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', username = 'Zack', password = 'Zack') with pytest.raises(NotImplementedError): api._delete()
28.121212
68
0.601293
99
928
5.555556
0.282828
0.054545
0.167273
0.196364
0.730909
0.730909
0.730909
0.730909
0.730909
0.730909
0
0
0.248922
928
32
69
29
0.789096
0
0
0.555556
0
0
0.18123
0
0
0
0
0
0.037037
1
0.148148
false
0.148148
0.074074
0
0.222222
0
0
0
0
null
0
0
1
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
7
89768799b001430dfcab3c81814f87c6de49fc5c
350
py
Python
devilry/devilry_gradingsystem/tests/__init__.py
aless80/devilry-django
416c262e75170d5662542f15e2d7fecf5ab84730
[ "BSD-3-Clause" ]
29
2015-01-18T22:56:23.000Z
2020-11-10T21:28:27.000Z
devilry/devilry_gradingsystem/tests/__init__.py
aless80/devilry-django
416c262e75170d5662542f15e2d7fecf5ab84730
[ "BSD-3-Clause" ]
786
2015-01-06T16:10:18.000Z
2022-03-16T11:10:50.000Z
devilry/devilry_gradingsystem/tests/__init__.py
aless80/devilry-django
416c262e75170d5662542f15e2d7fecf5ab84730
[ "BSD-3-Clause" ]
15
2015-04-06T06:18:43.000Z
2021-02-24T12:28:30.000Z
from .views.test_feedbackdraft_preview import * from .views.admin.test_selectplugin import * from .views.admin.test_summary import * from .views.admin.test_setmaxpoints import * from .views.admin.test_select_points_to_grade_mapper import * from .views.admin.test_setup_custom_table import * from .views.admin.test_setpassing_grade_min_points import *
50
61
0.845714
51
350
5.470588
0.392157
0.225806
0.322581
0.430108
0.516129
0
0
0
0
0
0
0
0.077143
350
7
62
50
0.863777
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.142857
1
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
8
98221732c5a4fafa66438edacb8ce26540b9d6b0
4,635
py
Python
tests/unit/extutils/arr.py
RaenonX/Jelly-Bot-API
c7da1e91783dce3a2b71b955b3a22b68db9056cf
[ "MIT" ]
5
2020-08-26T20:12:00.000Z
2020-12-11T16:39:22.000Z
tests/unit/extutils/arr.py
RaenonX/Jelly-Bot
c7da1e91783dce3a2b71b955b3a22b68db9056cf
[ "MIT" ]
234
2019-12-14T03:45:19.000Z
2020-08-26T18:55:19.000Z
tests/unit/extutils/arr.py
RaenonX/Jelly-Bot-API
c7da1e91783dce3a2b71b955b3a22b68db9056cf
[ "MIT" ]
2
2019-10-23T15:21:15.000Z
2020-05-22T09:35:55.000Z
from extutils.arr import extract_list_action, extract_one from tests.base import TestCase __all__ = ["TestExtractList", "TestExtractOne"] def action(data): for i in range(len(data)): data[i] = True return data def action_set_value(data, value): for i in range(len(data)): data[i] = value return data def action_not_mutate(_): return [7] class TestExtractOne(TestCase): def test_extract_list(self): self.assertEqual(extract_one([1]), 1) def test_extract_list_empty(self): self.assertIsNone(extract_one([])) def test_extract_list_2d(self): self.assertEqual(extract_one([[1]]), [1]) def test_extract_list_2d_empty(self): self.assertEqual(extract_one([[]]), []) def test_extract_set(self): self.assertEqual(extract_one({1}), 1) def test_extract_set_empty(self): self.assertIsNone(extract_one(set())) def test_extract_tuple(self): self.assertEqual(extract_one((1,)), 1) def test_extract_tuple_empty(self): self.assertIsNone(extract_one(())) # noinspection PyTypeChecker def test_extract_type_miss(self): with self.assertRaises(TypeError): extract_one(7) with self.assertRaises(TypeError): extract_one("ABC") class TestExtractList(TestCase): def test_empty_1d(self): data = [] expected = [] self.assertEqual(extract_list_action(data, action), expected) self.assertEqual(data, expected) def test_empty_1d_with_args(self): data = [] expected = [] self.assertEqual(extract_list_action(data, action_set_value, 5), expected) self.assertEqual(data, expected) def test_empty_2d(self): data = [[]] expected = [[]] self.assertEqual(extract_list_action(data, action), expected) self.assertEqual(data, expected) def test_empty_2d_with_args(self): data = [[]] expected = [[]] self.assertEqual(extract_list_action(data, action_set_value, 5), expected) self.assertEqual(data, expected) def test_empty_3d(self): data = [[[]]] expected = [[[]]] self.assertEqual(extract_list_action(data, action), expected) self.assertEqual(data, expected) def test_empty_3d_with_args(self): data = [[[]]] expected = [[[]]] 
self.assertEqual(extract_list_action(data, action_set_value, 5), expected) self.assertEqual(data, expected) def test_1d(self): data = [False, False] expected = [True, True] self.assertEqual(extract_list_action(data, action), expected) self.assertEqual(data, expected) def test_1d_with_args(self): data = [False, False] expected = [5, 5] self.assertEqual(extract_list_action(data, action_set_value, 5), expected) self.assertEqual(data, expected) def test_2d_regular(self): data = [[False, False], [False, False]] expected = [[True, True], [True, True]] self.assertEqual(extract_list_action(data, action), expected) self.assertEqual(data, expected) def test_2d_irregular(self): data = [[False, False], [False, False], [False]] expected = [[True, True], [True, True], [True]] self.assertEqual(extract_list_action(data, action), expected) self.assertEqual(data, [[True, True], [True, True], [True]]) def test_2d_with_args(self): data = [[False, False], [False, False], [False]] expected = [[5, 5], [5, 5], [5]] self.assertEqual(extract_list_action(data, action_set_value, 5), expected) self.assertEqual(data, expected) def test_3d(self): data = [[[False, False], [False, False]], [[False, False], [False, False]]] expected = [[[True, True], [True, True]], [[True, True], [True, True]]] self.assertEqual(extract_list_action(data, action), expected) self.assertEqual(data, expected) def test_3d_not_mutate(self): data = [[[False, False], [False, False]], [[False, False], [False, False]]] self.assertEqual(extract_list_action(data, action_not_mutate), [[[7], [7]], [[7], [7]]]) self.assertEqual(data, [[[False, False], [False, False]], [[False, False], [False, False]]]) def test_3d_with_args(self): data = [[[False, False], [False, False]], [[False, False], [False, False]]] expected = [[[5, 5], [5, 5]], [[5, 5], [5, 5]]] self.assertEqual(extract_list_action(data, action_set_value, 5), expected) self.assertEqual(data, expected)
29.903226
100
0.625674
552
4,635
5.027174
0.096014
0.147748
0.172973
0.18018
0.838919
0.793874
0.715676
0.700541
0.676757
0.639279
0
0.014298
0.230421
4,635
154
101
30.097403
0.763667
0.005609
0
0.47619
0
0
0.006946
0
0
0
0
0
0.361905
1
0.247619
false
0
0.019048
0.009524
0.314286
0
0
0
0
null
0
0
1
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
7
9824835424a7983fea53dab3aa5aec253869fce6
2,024
py
Python
tests/jobs/test_job_downloads.py
kids-first/kf-api-study-creator
93a79b108b6474f9b4135ace06c89ddcf63dd257
[ "Apache-2.0" ]
3
2019-05-04T02:07:28.000Z
2020-10-16T17:47:44.000Z
tests/jobs/test_job_downloads.py
kids-first/kf-api-study-creator
93a79b108b6474f9b4135ace06c89ddcf63dd257
[ "Apache-2.0" ]
604
2019-02-21T18:14:51.000Z
2022-02-10T08:13:54.000Z
tests/jobs/test_job_downloads.py
kids-first/kf-api-study-creator
93a79b108b6474f9b4135ace06c89ddcf63dd257
[ "Apache-2.0" ]
null
null
null
import pytest from django.core import management from django.http.response import HttpResponse from creator.jobs.models import Job, JobLog def test_job_download_url(clients, db, mocker): client = clients.get("Administrators") job = Job() job.save() assert Job.objects.count() == 1 log = JobLog(job_id=job.pk) log.save() assert JobLog.objects.count() == 1 mock_resp = mocker.patch("creator.jobs.views.HttpResponse") mock_resp.return_value = HttpResponse(open("tests/data/data.csv")) query = "{allJobLogs { edges { node { downloadUrl } } } }" query_data = {"query": query.strip()} resp = client.post( "/graphql", data=query_data, content_type="application/json" ) assert resp.status_code == 200 assert "data" in resp.json() assert "allJobLogs" in resp.json()["data"] jobLog_json = resp.json()["data"]["allJobLogs"]["edges"][0]["node"] expect_url = ( f"https://testserver/logs/{log.pk}" ) assert jobLog_json["downloadUrl"] == expect_url def test_job_download_url_develop(clients, db, mocker, settings): settings.DEVELOP = True management.call_command("setup_test_user") client = clients.get("Administrators") job = Job() job.save() assert Job.objects.count() == 1 log = JobLog(job_id=job.pk) log.save() assert JobLog.objects.count() == 1 mock_resp = mocker.patch("creator.jobs.views.HttpResponse") mock_resp.return_value = HttpResponse(open("tests/data/data.csv")) query = "{allJobLogs { edges { node { downloadUrl } } } } " query_data = {"query": query.strip()} resp = client.post( "/graphql", data=query_data, content_type="application/json" ) assert resp.status_code == 200 assert "data" in resp.json() assert "allJobLogs" in resp.json()["data"] jobLog_json = resp.json()["data"]["allJobLogs"]["edges"][0]["node"] expect_url = ( f"http://testserver/logs/{log.pk}" ) assert jobLog_json["downloadUrl"] == expect_url
32.126984
71
0.655632
255
2,024
5.070588
0.278431
0.037123
0.040217
0.027842
0.825986
0.793503
0.793503
0.793503
0.793503
0.793503
0
0.007322
0.190217
2,024
62
72
32.645161
0.781574
0
0
0.730769
0
0
0.229743
0.030632
0
0
0
0
0.230769
1
0.038462
false
0
0.076923
0
0.115385
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
9860b5d628799cd35fa93a25151de4b15c61a4ec
37,222
py
Python
tests/test_recruitment_query.py
Agile-Data/flat-ql
3212ae9d0ec4ba822c065bb5e4beccf9e936971b
[ "MIT" ]
3
2022-03-21T05:03:39.000Z
2022-03-23T01:32:51.000Z
tests/test_recruitment_query.py
Agile-Data/flat-ql
3212ae9d0ec4ba822c065bb5e4beccf9e936971b
[ "MIT" ]
null
null
null
tests/test_recruitment_query.py
Agile-Data/flat-ql
3212ae9d0ec4ba822c065bb5e4beccf9e936971b
[ "MIT" ]
null
null
null
import os from flatql import parse_from_hocon_path from flatql.parser.ast import StringLiteral from flatql.parser.flatql_parser import parse_flatql from flatql.rewriter.query import SqlPredicateAppender from flatql.rewriter.sql import SqlTable, SqlComparison, SqlColumn, SqlIdentifier, SqlLiteral, O_EQ from flatql.rewriter.sql_generic import SqlNodeIntrospection from flatql.rewriter.sql_rewriter import SqlRewriter recruitment_schema = parse_from_hocon_path(f"{os.path.dirname(__file__)}/schemas/recruitment") def test_simple_query1(): sql_rewriter = SqlRewriter(recruitment_schema) parse_flatql("SELECT Tenant.name FROM recruitment").rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "ta_0"."name" FROM "v_tenant" AS "ta_0"' parse_flatql("SELECT Tenant.name AS a1 FROM recruitment").rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "ta_0"."name" AS "a1" FROM "v_tenant" AS "ta_0"' def test_simple_query2(): sql_rewriter = SqlRewriter(recruitment_schema) parse_flatql("SELECT COUNT(Tenant.name) FROM recruitment").rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT COUNT("ta_0"."name") FROM "v_tenant" AS "ta_0"' parse_flatql("SELECT COUNT(Tenant.name) AS a1 FROM recruitment").rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT COUNT("ta_0"."name") AS "a1" FROM "v_tenant" AS "ta_0"' def test_simple_query3(): sql_rewriter = SqlRewriter(recruitment_schema) parse_flatql("SELECT Tenant.name AS \"租户\", Channel.name AS \"频道\", Job.name AS \"职位\" " "FROM recruitment WHERE Tenant.name = 'tenant-98' ORDER BY \"频道\" LIMIT 10 OFFSET 0").rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "ta_0"."name" AS "租户", "ta_1"."name" AS "频道", "ta_2"."jobName" AS "职位" FROM "v_channels" AS "ta_1" INNER JOIN "v_projects" AS "ta_3" ON "ta_1"."openId" = "ta_3"."channelOpenId" INNER JOIN "v_job" AS "ta_2" ON "ta_3"."payloadType" = \'Job\' AND "ta_3"."payloadOpenId" = "ta_2"."openId" INNER JOIN "v_tenant" AS "ta_0" ON "ta_1"."tenant" = "ta_0"."name" 
WHERE "ta_0"."name" = \'tenant-98\' ORDER BY "频道" ASC LIMIT 10 OFFSET 0' def test_simple_query4(): sql_rewriter = SqlRewriter(recruitment_schema) parse_flatql("SELECT Tenant.name AS \"租户\", MIN(ResumeHumaninfo.age) AS \"年龄(最小值)\" , MAX(ResumeHumaninfo.age) AS \"年龄(最大值)\", AVG(ResumeHumaninfo.age) AS \"年龄(平均值)\" " "FROM recruitment WHERE Tenant.name = 'tenant-98' ORDER BY \"租户\" LIMIT 10 OFFSET 0").rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "ta_0"."name" AS "租户", MIN("ta_1"."ageNormalized") AS "年龄(最小值)", MAX("ta_1"."ageNormalized") AS "年龄(最大值)", AVG("ta_1"."ageNormalized") AS "年龄(平均值)" FROM "v_channels" AS "ta_2" INNER JOIN "v_projects" AS "ta_3" ON "ta_2"."openId" = "ta_3"."channelOpenId" INNER JOIN "v_flow" AS "ta_4" ON "ta_3"."openId" = "ta_4"."circuitForeignId" INNER JOIN "v_resume" AS "ta_1" ON "ta_1"."openId" = "ta_4"."beanSourceId" INNER JOIN "v_tenant" AS "ta_0" ON "ta_2"."tenant" = "ta_0"."name" WHERE "ta_0"."name" = \'tenant-98\' GROUP BY "租户" ORDER BY "租户" ASC LIMIT 10 OFFSET 0' def test_join_query1(): sql_rewriter = SqlRewriter(recruitment_schema) parse_flatql("SELECT Tenant.name, Channel.name FROM recruitment").rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "ta_0"."name", "ta_1"."name" FROM "v_channels" AS "ta_1" INNER JOIN "v_tenant" AS "ta_0" ON "ta_1"."tenant" = "ta_0"."name"' def test_aggregate_query1(): sql_rewriter = SqlRewriter(recruitment_schema) parse_flatql("SELECT Tenant.name AS \"租户\", COUNT(Job.name) AS \"职位(数量)\" FROM recruitment " "WHERE Tenant.name IN ('tenant-98', 'tenantshared')").rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "qu_0"."co_1" AS "租户", "qu_0"."co_3" AS "职位(数量)" FROM (SELECT "qu_1"."co_1" AS "co_1", SUM("qu_2"."co_2") AS "co_3" FROM (SELECT "ta_0"."openId" AS "co_0", "ta_1"."name" AS "co_1" FROM "v_channels" AS "ta_2" INNER JOIN "v_projects" AS "ta_3" ON "ta_2"."openId" = "ta_3"."channelOpenId" INNER JOIN "v_job" AS "ta_0" ON "ta_3"."payloadType" = \'Job\' AND 
"ta_3"."payloadOpenId" = "ta_0"."openId" INNER JOIN "v_tenant" AS "ta_1" ON "ta_2"."tenant" = "ta_1"."name" WHERE "ta_1"."name" IN (\'tenant-98\', \'tenantshared\') GROUP BY "co_0", "co_1") AS "qu_1" LEFT JOIN (SELECT "ta_0"."openId" AS "co_0", COUNT("ta_0"."jobName") AS "co_2" FROM "v_job" AS "ta_0" GROUP BY "co_0") AS "qu_2" ON "qu_1"."co_0" = "qu_2"."co_0" GROUP BY "co_1") AS "qu_0"' def test_aggregate_query2(): sql_rewriter = SqlRewriter(recruitment_schema) parse_flatql('SELECT Tenant.name AS "租户名称", COUNT(Channel.name) AS "频道(数量)",' ' COUNT(Job.name) AS "职位(数量)" FROM recruitment').rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "qu_0"."co_1" AS "租户名称", "qu_1"."co_3" AS "频道(数量)", "qu_0"."co_6" AS "职位(数量)" FROM (SELECT "qu_2"."co_1" AS "co_1", SUM("qu_3"."co_2") AS "co_3" FROM (SELECT "ta_0"."openId" AS "co_0", "ta_1"."name" AS "co_1" FROM "v_channels" AS "ta_0" INNER JOIN "v_tenant" AS "ta_1" ON "ta_0"."tenant" = "ta_1"."name" GROUP BY "co_0", "co_1") AS "qu_2" LEFT JOIN (SELECT "ta_0"."openId" AS "co_0", COUNT("ta_0"."name") AS "co_2" FROM "v_channels" AS "ta_0" GROUP BY "co_0") AS "qu_3" ON "qu_2"."co_0" = "qu_3"."co_0" GROUP BY "co_1") AS "qu_1" INNER JOIN (SELECT "qu_4"."co_1" AS "co_1", SUM("qu_5"."co_5") AS "co_6" FROM (SELECT "ta_2"."openId" AS "co_4", "ta_1"."name" AS "co_1" FROM "v_channels" AS "ta_0" INNER JOIN "v_projects" AS "ta_3" ON "ta_0"."openId" = "ta_3"."channelOpenId" INNER JOIN "v_job" AS "ta_2" ON "ta_3"."payloadType" = \'Job\' AND "ta_3"."payloadOpenId" = "ta_2"."openId" INNER JOIN "v_tenant" AS "ta_1" ON "ta_0"."tenant" = "ta_1"."name" GROUP BY "co_4", "co_1") AS "qu_4" LEFT JOIN (SELECT "ta_2"."openId" AS "co_4", COUNT("ta_2"."jobName") AS "co_5" FROM "v_job" AS "ta_2" GROUP BY "co_4") AS "qu_5" ON "qu_4"."co_4" = "qu_5"."co_4" GROUP BY "co_1") AS "qu_0" ON "qu_1"."co_1" = "qu_0"."co_1"' def test_aggregate_query3(): sql_rewriter = SqlRewriter(recruitment_schema) parse_flatql('SELECT Tenant.name AS "租户名称", 
COUNT(Channel.name) AS "频道(数量)",' ' COUNT(Job.name) AS "职位(数量)", COUNT(ResumeHumaninfo.name) AS "简历(数量)" ' 'FROM recruitment ' 'ORDER BY "租户名称" LIMIT 10 OFFSET 0').rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "qu_0"."co_1" AS "租户名称", "qu_1"."co_8" AS "频道(数量)", "qu_0"."co_12" AS "职位(数量)", "qu_1"."co_9" AS "简历(数量)" FROM (SELECT "qu_2"."co_1" AS "co_7", "qu_3"."co_3" AS "co_8", "qu_2"."co_6" AS "co_9" FROM (SELECT "qu_4"."co_1" AS "co_1", SUM("qu_5"."co_2") AS "co_3" FROM (SELECT "ta_0"."openId" AS "co_0", "ta_1"."name" AS "co_1" FROM "v_channels" AS "ta_0" INNER JOIN "v_tenant" AS "ta_1" ON "ta_0"."tenant" = "ta_1"."name" GROUP BY "co_0", "co_1") AS "qu_4" LEFT JOIN (SELECT "ta_0"."openId" AS "co_0", COUNT("ta_0"."name") AS "co_2" FROM "v_channels" AS "ta_0" GROUP BY "co_0") AS "qu_5" ON "qu_4"."co_0" = "qu_5"."co_0" GROUP BY "co_1") AS "qu_3" INNER JOIN (SELECT "qu_6"."co_1" AS "co_1", SUM("qu_7"."co_5") AS "co_6" FROM (SELECT "ta_2"."openId" AS "co_4", "ta_1"."name" AS "co_1" FROM "v_channels" AS "ta_0" INNER JOIN "v_projects" AS "ta_3" ON "ta_0"."openId" = "ta_3"."channelOpenId" INNER JOIN "v_flow" AS "ta_4" ON "ta_3"."openId" = "ta_4"."circuitForeignId" INNER JOIN "v_resume" AS "ta_2" ON "ta_2"."openId" = "ta_4"."beanSourceId" INNER JOIN "v_tenant" AS "ta_1" ON "ta_0"."tenant" = "ta_1"."name" GROUP BY "co_4", "co_1") AS "qu_6" LEFT JOIN (SELECT "ta_2"."openId" AS "co_4", COUNT("ta_2"."name") AS "co_5" FROM "v_resume" AS "ta_2" GROUP BY "co_4") AS "qu_7" ON "qu_6"."co_4" = "qu_7"."co_4" GROUP BY "co_1") AS "qu_2" ON "qu_3"."co_1" = "qu_2"."co_1") AS "qu_1" INNER JOIN (SELECT "qu_8"."co_1" AS "co_1", SUM("qu_9"."co_11") AS "co_12" FROM (SELECT "ta_5"."openId" AS "co_10", "ta_1"."name" AS "co_1" FROM "v_channels" AS "ta_0" INNER JOIN "v_projects" AS "ta_3" ON "ta_0"."openId" = "ta_3"."channelOpenId" INNER JOIN "v_job" AS "ta_5" ON "ta_3"."payloadType" = \'Job\' AND "ta_3"."payloadOpenId" = "ta_5"."openId" INNER JOIN "v_tenant" AS "ta_1" ON 
"ta_0"."tenant" = "ta_1"."name" GROUP BY "co_10", "co_1") AS "qu_8" LEFT JOIN (SELECT "ta_5"."openId" AS "co_10", COUNT("ta_5"."jobName") AS "co_11" FROM "v_job" AS "ta_5" GROUP BY "co_10") AS "qu_9" ON "qu_8"."co_10" = "qu_9"."co_10" GROUP BY "co_1") AS "qu_0" ON "qu_1"."co_7" = "qu_0"."co_1" ORDER BY "租户名称" ASC LIMIT 10 OFFSET 0' def test_aggregate_query4(): sql_rewriter = SqlRewriter(recruitment_schema) parse_flatql('SELECT Tenant.name AS "租户", Job.name AS "职位", COUNT(ResumeHumaninfo.name) AS "简历(数量)" ' 'FROM recruitment ORDER BY "租户" LIMIT 10 OFFSET 0').rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "qu_0"."co_1" AS "租户", "qu_0"."co_2" AS "职位", "qu_0"."co_4" AS "简历(数量)" FROM (SELECT "qu_1"."co_1" AS "co_1", "qu_1"."co_2" AS "co_2", SUM("qu_2"."co_3") AS "co_4" FROM (SELECT "ta_0"."openId" AS "co_0", "ta_1"."name" AS "co_1", "ta_2"."jobName" AS "co_2" FROM "v_channels" AS "ta_3" INNER JOIN "v_projects" AS "ta_4" ON "ta_3"."openId" = "ta_4"."channelOpenId" INNER JOIN "v_flow" AS "ta_5" ON "ta_4"."openId" = "ta_5"."circuitForeignId" INNER JOIN "v_job" AS "ta_2" ON "ta_4"."payloadType" = \'Job\' AND "ta_4"."payloadOpenId" = "ta_2"."openId" INNER JOIN "v_resume" AS "ta_0" ON "ta_0"."openId" = "ta_5"."beanSourceId" INNER JOIN "v_tenant" AS "ta_1" ON "ta_3"."tenant" = "ta_1"."name" GROUP BY "co_0", "co_1", "co_2") AS "qu_1" LEFT JOIN (SELECT "ta_0"."openId" AS "co_0", COUNT("ta_0"."name") AS "co_3" FROM "v_resume" AS "ta_0" GROUP BY "co_0") AS "qu_2" ON "qu_1"."co_0" = "qu_2"."co_0" GROUP BY "co_1", "co_2") AS "qu_0" ORDER BY "租户" ASC LIMIT 10 OFFSET 0' def test_aggregate_query5(): sql_rewriter = SqlRewriter(recruitment_schema) parse_flatql('SELECT Tenant.name AS "租户名称", COUNT(Channel.name) AS "频道(数量)",' ' COUNT(Job.name) AS "职位(数量)" FROM recruitment WHERE Tenant.name IN (\'tenant-98\', \'maisui-70\')').rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "qu_0"."co_1" AS "租户名称", "qu_1"."co_3" AS "频道(数量)", "qu_0"."co_6" AS "职位(数量)" FROM (SELECT 
"qu_2"."co_1" AS "co_1", SUM("qu_3"."co_2") AS "co_3" FROM (SELECT "ta_0"."openId" AS "co_0", "ta_1"."name" AS "co_1" FROM "v_channels" AS "ta_0" INNER JOIN "v_tenant" AS "ta_1" ON "ta_0"."tenant" = "ta_1"."name" WHERE "ta_1"."name" IN (\'tenant-98\', \'maisui-70\') GROUP BY "co_0", "co_1") AS "qu_2" LEFT JOIN (SELECT "ta_0"."openId" AS "co_0", COUNT("ta_0"."name") AS "co_2" FROM "v_channels" AS "ta_0" GROUP BY "co_0") AS "qu_3" ON "qu_2"."co_0" = "qu_3"."co_0" GROUP BY "co_1") AS "qu_1" INNER JOIN (SELECT "qu_4"."co_1" AS "co_1", SUM("qu_5"."co_5") AS "co_6" FROM (SELECT "ta_2"."openId" AS "co_4", "ta_1"."name" AS "co_1" FROM "v_channels" AS "ta_0" INNER JOIN "v_projects" AS "ta_3" ON "ta_0"."openId" = "ta_3"."channelOpenId" INNER JOIN "v_job" AS "ta_2" ON "ta_3"."payloadType" = \'Job\' AND "ta_3"."payloadOpenId" = "ta_2"."openId" INNER JOIN "v_tenant" AS "ta_1" ON "ta_0"."tenant" = "ta_1"."name" WHERE "ta_1"."name" IN (\'tenant-98\', \'maisui-70\') GROUP BY "co_4", "co_1") AS "qu_4" LEFT JOIN (SELECT "ta_2"."openId" AS "co_4", COUNT("ta_2"."jobName") AS "co_5" FROM "v_job" AS "ta_2" GROUP BY "co_4") AS "qu_5" ON "qu_4"."co_4" = "qu_5"."co_4" GROUP BY "co_1") AS "qu_0" ON "qu_1"."co_1" = "qu_0"."co_1"' def test_aggregate_query6(): sql_rewriter = SqlRewriter(recruitment_schema) parse_flatql('SELECT Tenant.name AS "租户", COUNT(ResumeHumaninfo.name) AS "简历(数量)", AVG(ResumeHumaninfo.age) AS "平均年龄" ' 'FROM recruitment WHERE Tenant.name IN (\'tenant-98\', \'maisui-70\') ' 'ORDER BY "简历(数量)" LIMIT 10 OFFSET 0').rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "qu_0"."co_1" AS "租户", "qu_0"."co_4" AS "简历(数量)", "qu_0"."co_5" AS "平均年龄" FROM (SELECT "qu_1"."co_1" AS "co_1", SUM("qu_2"."co_2") AS "co_4", AVG("qu_2"."co_3") AS "co_5" FROM (SELECT "ta_0"."openId" AS "co_0", "ta_1"."name" AS "co_1" FROM "v_channels" AS "ta_2" INNER JOIN "v_projects" AS "ta_3" ON "ta_2"."openId" = "ta_3"."channelOpenId" INNER JOIN "v_flow" AS "ta_4" ON "ta_3"."openId" = 
"ta_4"."circuitForeignId" INNER JOIN "v_resume" AS "ta_0" ON "ta_0"."openId" = "ta_4"."beanSourceId" INNER JOIN "v_tenant" AS "ta_1" ON "ta_2"."tenant" = "ta_1"."name" WHERE "ta_1"."name" IN (\'tenant-98\', \'maisui-70\') GROUP BY "co_0", "co_1") AS "qu_1" LEFT JOIN (SELECT "ta_0"."openId" AS "co_0", COUNT("ta_0"."name") AS "co_2", AVG("ta_0"."ageNormalized") AS "co_3" FROM "v_resume" AS "ta_0" GROUP BY "co_0") AS "qu_2" ON "qu_1"."co_0" = "qu_2"."co_0" GROUP BY "co_1") AS "qu_0" ORDER BY "简历(数量)" ASC LIMIT 10 OFFSET 0' def test_aggregate_query7(): sql_rewriter = SqlRewriter(recruitment_schema) parse_flatql('SELECT COUNT(ResumeHumaninfo.name) AS "简历(数量)", COUNT(Job.name) AS "平均年龄" ' 'FROM recruitment').rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "qu_0"."co_5" AS "简历(数量)", "qu_1"."co_2" AS "平均年龄" FROM (SELECT SUM("qu_2"."co_1") AS "co_2" FROM (SELECT "ta_0"."openId" AS "co_0" FROM "v_job" AS "ta_0" GROUP BY "co_0") AS "qu_3" LEFT JOIN (SELECT "ta_0"."openId" AS "co_0", COUNT("ta_0"."jobName") AS "co_1" FROM "v_job" AS "ta_0" GROUP BY "co_0") AS "qu_2" ON "qu_3"."co_0" = "qu_2"."co_0") AS "qu_1" CROSS JOIN (SELECT SUM("qu_4"."co_4") AS "co_5" FROM (SELECT "ta_1"."openId" AS "co_3" FROM "v_resume" AS "ta_1" GROUP BY "co_3") AS "qu_5" LEFT JOIN (SELECT "ta_1"."openId" AS "co_3", COUNT("ta_1"."name") AS "co_4" FROM "v_resume" AS "ta_1" GROUP BY "co_3") AS "qu_4" ON "qu_5"."co_3" = "qu_4"."co_3") AS "qu_0"' def test_aggregate_query8(): sql_rewriter = SqlRewriter(recruitment_schema) parse_flatql('SELECT COUNT(ResumeHumaninfo.name) AS "简历(数量)", COUNT(Job.name) AS "职位(数量)" ' ', COUNT(Channel.name) AS "频道数量" FROM recruitment').rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "qu_0"."co_6" AS "简历(数量)", "qu_1"."co_10" AS "职位(数量)", "qu_0"."co_7" AS "频道数量" FROM (SELECT "qu_2"."co_5" AS "co_6", "qu_3"."co_2" AS "co_7" FROM (SELECT SUM("qu_4"."co_1") AS "co_2" FROM (SELECT "ta_0"."openId" AS "co_0" FROM "v_channels" AS "ta_0" GROUP BY "co_0") AS 
"qu_5" LEFT JOIN (SELECT "ta_0"."openId" AS "co_0", COUNT("ta_0"."name") AS "co_1" FROM "v_channels" AS "ta_0" GROUP BY "co_0") AS "qu_4" ON "qu_5"."co_0" = "qu_4"."co_0") AS "qu_3" CROSS JOIN (SELECT SUM("qu_6"."co_4") AS "co_5" FROM (SELECT "ta_1"."openId" AS "co_3" FROM "v_resume" AS "ta_1" GROUP BY "co_3") AS "qu_7" LEFT JOIN (SELECT "ta_1"."openId" AS "co_3", COUNT("ta_1"."name") AS "co_4" FROM "v_resume" AS "ta_1" GROUP BY "co_3") AS "qu_6" ON "qu_7"."co_3" = "qu_6"."co_3") AS "qu_2") AS "qu_0" CROSS JOIN (SELECT SUM("qu_8"."co_9") AS "co_10" FROM (SELECT "ta_2"."openId" AS "co_8" FROM "v_job" AS "ta_2" GROUP BY "co_8") AS "qu_9" LEFT JOIN (SELECT "ta_2"."openId" AS "co_8", COUNT("ta_2"."jobName") AS "co_9" FROM "v_job" AS "ta_2" GROUP BY "co_8") AS "qu_8" ON "qu_9"."co_8" = "qu_8"."co_8") AS "qu_1"' def test_aggregate_query9(): sql_rewriter = SqlRewriter(recruitment_schema) parse_flatql('SELECT COUNT(ResumeHumaninfo.name) AS "简历(数量)", AVG(ResumeHumaninfo.age) AS "平均年龄" ' 'FROM recruitment WHERE Tenant.name IN (\'tenant-98\', \'maisui-70\') ').rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "qu_0"."co_3" AS "简历(数量)", "qu_0"."co_4" AS "平均年龄" FROM (SELECT SUM("qu_1"."co_1") AS "co_3", AVG("qu_1"."co_2") AS "co_4" FROM (SELECT "ta_0"."openId" AS "co_0" FROM "v_channels" AS "ta_1" INNER JOIN "v_projects" AS "ta_2" ON "ta_1"."openId" = "ta_2"."channelOpenId" INNER JOIN "v_flow" AS "ta_3" ON "ta_2"."openId" = "ta_3"."circuitForeignId" INNER JOIN "v_resume" AS "ta_0" ON "ta_0"."openId" = "ta_3"."beanSourceId" INNER JOIN "v_tenant" AS "ta_4" ON "ta_1"."tenant" = "ta_4"."name" WHERE "ta_4"."name" IN (\'tenant-98\', \'maisui-70\') GROUP BY "co_0") AS "qu_2" LEFT JOIN (SELECT "ta_0"."openId" AS "co_0", COUNT("ta_0"."name") AS "co_1", AVG("ta_0"."ageNormalized") AS "co_2" FROM "v_resume" AS "ta_0" GROUP BY "co_0") AS "qu_1" ON "qu_2"."co_0" = "qu_1"."co_0") AS "qu_0"' def test_aggregate_query10(): sql_rewriter = SqlRewriter(recruitment_schema) 
parse_flatql('SELECT COUNT(ResumeHumaninfo.name) AS "简历(数量)", AVG(ResumeHumaninfo.age) AS "平均年龄" ' 'FROM recruitment WHERE Tenant.name IN (\'tenant-98\', \'maisui-70\') AND ResumeHumaninfo.age > 0').rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "qu_0"."co_3" AS "简历(数量)", "qu_0"."co_4" AS "平均年龄" FROM (SELECT SUM("qu_1"."co_1") AS "co_3", AVG("qu_1"."co_2") AS "co_4" FROM (SELECT "ta_0"."openId" AS "co_0" FROM "v_channels" AS "ta_1" INNER JOIN "v_projects" AS "ta_2" ON "ta_1"."openId" = "ta_2"."channelOpenId" INNER JOIN "v_flow" AS "ta_3" ON "ta_2"."openId" = "ta_3"."circuitForeignId" INNER JOIN "v_resume" AS "ta_0" ON "ta_0"."openId" = "ta_3"."beanSourceId" INNER JOIN "v_tenant" AS "ta_4" ON "ta_1"."tenant" = "ta_4"."name" WHERE "ta_4"."name" IN (\'tenant-98\', \'maisui-70\') AND "ta_0"."ageNormalized" > 0 GROUP BY "co_0") AS "qu_2" LEFT JOIN (SELECT "ta_0"."openId" AS "co_0", COUNT("ta_0"."name") AS "co_1", AVG("ta_0"."ageNormalized") AS "co_2" FROM "v_resume" AS "ta_0" WHERE "ta_0"."ageNormalized" > 0 GROUP BY "co_0") AS "qu_1" ON "qu_2"."co_0" = "qu_1"."co_0") AS "qu_0"' def test_aggregate_query11(): sql_rewriter = SqlRewriter(recruitment_schema) parse_flatql('SELECT COUNT(ResumeHumaninfo.name) AS "简历(数量)", COUNT(Job.name) AS "职位(数量)" ' ', COUNT(Channel.name) AS "频道数量" FROM recruitment WHERE ResumeHumaninfo.age > 0').rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "qu_0"."co_6" AS "简历(数量)", "qu_1"."co_10" AS "职位(数量)", "qu_0"."co_7" AS "频道数量" FROM (SELECT "qu_2"."co_5" AS "co_6", "qu_3"."co_2" AS "co_7" FROM (SELECT SUM("qu_4"."co_1") AS "co_2" FROM (SELECT "ta_0"."openId" AS "co_0" FROM "v_channels" AS "ta_0" INNER JOIN "v_projects" AS "ta_1" ON "ta_0"."openId" = "ta_1"."channelOpenId" INNER JOIN "v_flow" AS "ta_2" ON "ta_1"."openId" = "ta_2"."circuitForeignId" INNER JOIN "v_resume" AS "ta_3" ON "ta_3"."openId" = "ta_2"."beanSourceId" WHERE "ta_3"."ageNormalized" > 0 GROUP BY "co_0") AS "qu_5" LEFT JOIN (SELECT "ta_0"."openId" AS 
"co_0", COUNT("ta_0"."name") AS "co_1" FROM "v_channels" AS "ta_0" GROUP BY "co_0") AS "qu_4" ON "qu_5"."co_0" = "qu_4"."co_0") AS "qu_3" CROSS JOIN (SELECT SUM("qu_6"."co_4") AS "co_5" FROM (SELECT "ta_3"."openId" AS "co_3" FROM "v_resume" AS "ta_3" WHERE "ta_3"."ageNormalized" > 0 GROUP BY "co_3") AS "qu_7" LEFT JOIN (SELECT "ta_3"."openId" AS "co_3", COUNT("ta_3"."name") AS "co_4" FROM "v_resume" AS "ta_3" WHERE "ta_3"."ageNormalized" > 0 GROUP BY "co_3") AS "qu_6" ON "qu_7"."co_3" = "qu_6"."co_3") AS "qu_2") AS "qu_0" CROSS JOIN (SELECT SUM("qu_8"."co_9") AS "co_10" FROM (SELECT "ta_4"."openId" AS "co_8" FROM "v_flow" AS "ta_2" INNER JOIN "v_projects" AS "ta_1" ON "ta_1"."openId" = "ta_2"."circuitForeignId" INNER JOIN "v_job" AS "ta_4" ON "ta_1"."payloadType" = \'Job\' AND "ta_1"."payloadOpenId" = "ta_4"."openId" INNER JOIN "v_resume" AS "ta_3" ON "ta_3"."openId" = "ta_2"."beanSourceId" WHERE "ta_3"."ageNormalized" > 0 GROUP BY "co_8") AS "qu_9" LEFT JOIN (SELECT "ta_4"."openId" AS "co_8", COUNT("ta_4"."jobName") AS "co_9" FROM "v_job" AS "ta_4" GROUP BY "co_8") AS "qu_8" ON "qu_9"."co_8" = "qu_8"."co_8") AS "qu_1"' def test_aggregate_query12(): class PredicateAppender(SqlPredicateAppender): def append(self, introspection: SqlNodeIntrospection, table: SqlTable): if table.name == 'Tenant': column = SqlColumn(table, SqlIdentifier("name")) else: column = SqlColumn(table, SqlIdentifier("tenant")) rhs = SqlLiteral(StringLiteral('tenant-98')) SqlComparison(column, O_EQ, rhs).inspect(introspection) sql_rewriter = SqlRewriter(recruitment_schema, sql_predicate_appender=PredicateAppender()) parse_flatql('SELECT COUNT(ResumeHumaninfo.name) AS "简历(数量)", COUNT(Job.name) AS "职位(数量)" ' ', COUNT(Channel.name) AS "频道数量" FROM recruitment').rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "qu_0"."co_6" AS "简历(数量)", "qu_1"."co_10" AS "职位(数量)", "qu_0"."co_7" AS "频道数量" FROM (SELECT "qu_2"."co_5" AS "co_6", "qu_3"."co_2" AS "co_7" FROM (SELECT SUM("qu_4"."co_1") AS 
"co_2" FROM (SELECT "ta_0"."openId" AS "co_0" FROM "v_channels" AS "ta_0" INNER JOIN "v_projects" AS "ta_1" ON "ta_0"."openId" = "ta_1"."channelOpenId" INNER JOIN "v_flow" AS "ta_2" ON "ta_1"."openId" = "ta_2"."circuitForeignId" INNER JOIN "v_job" AS "ta_3" ON "ta_1"."payloadType" = \'Job\' AND "ta_1"."payloadOpenId" = "ta_3"."openId" INNER JOIN "v_resume" AS "ta_4" ON "ta_4"."openId" = "ta_2"."beanSourceId" WHERE "ta_0"."tenant" = \'tenant-98\' AND "ta_2"."tenant" = \'tenant-98\' AND "ta_3"."tenant" = \'tenant-98\' AND "ta_1"."tenant" = \'tenant-98\' AND "ta_4"."tenant" = \'tenant-98\' GROUP BY "co_0") AS "qu_5" LEFT JOIN (SELECT "ta_0"."openId" AS "co_0", COUNT("ta_0"."name") AS "co_1" FROM "v_channels" AS "ta_0" WHERE "ta_0"."tenant" = \'tenant-98\' GROUP BY "co_0") AS "qu_4" ON "qu_5"."co_0" = "qu_4"."co_0") AS "qu_3" CROSS JOIN (SELECT SUM("qu_6"."co_4") AS "co_5" FROM (SELECT "ta_4"."openId" AS "co_3" FROM "v_channels" AS "ta_0" INNER JOIN "v_projects" AS "ta_1" ON "ta_0"."openId" = "ta_1"."channelOpenId" INNER JOIN "v_flow" AS "ta_2" ON "ta_1"."openId" = "ta_2"."circuitForeignId" INNER JOIN "v_job" AS "ta_3" ON "ta_1"."payloadType" = \'Job\' AND "ta_1"."payloadOpenId" = "ta_3"."openId" INNER JOIN "v_resume" AS "ta_4" ON "ta_4"."openId" = "ta_2"."beanSourceId" WHERE "ta_0"."tenant" = \'tenant-98\' AND "ta_2"."tenant" = \'tenant-98\' AND "ta_3"."tenant" = \'tenant-98\' AND "ta_1"."tenant" = \'tenant-98\' AND "ta_4"."tenant" = \'tenant-98\' GROUP BY "co_3") AS "qu_7" LEFT JOIN (SELECT "ta_4"."openId" AS "co_3", COUNT("ta_4"."name") AS "co_4" FROM "v_resume" AS "ta_4" WHERE "ta_4"."tenant" = \'tenant-98\' GROUP BY "co_3") AS "qu_6" ON "qu_7"."co_3" = "qu_6"."co_3") AS "qu_2") AS "qu_0" CROSS JOIN (SELECT SUM("qu_8"."co_9") AS "co_10" FROM (SELECT "ta_3"."openId" AS "co_8" FROM "v_channels" AS "ta_0" INNER JOIN "v_projects" AS "ta_1" ON "ta_0"."openId" = "ta_1"."channelOpenId" INNER JOIN "v_flow" AS "ta_2" ON "ta_1"."openId" = "ta_2"."circuitForeignId" INNER JOIN 
"v_job" AS "ta_3" ON "ta_1"."payloadType" = \'Job\' AND "ta_1"."payloadOpenId" = "ta_3"."openId" INNER JOIN "v_resume" AS "ta_4" ON "ta_4"."openId" = "ta_2"."beanSourceId" WHERE "ta_0"."tenant" = \'tenant-98\' AND "ta_2"."tenant" = \'tenant-98\' AND "ta_3"."tenant" = \'tenant-98\' AND "ta_1"."tenant" = \'tenant-98\' AND "ta_4"."tenant" = \'tenant-98\' GROUP BY "co_8") AS "qu_9" LEFT JOIN (SELECT "ta_3"."openId" AS "co_8", COUNT("ta_3"."jobName") AS "co_9" FROM "v_job" AS "ta_3" WHERE "ta_3"."tenant" = \'tenant-98\' GROUP BY "co_8") AS "qu_8" ON "qu_9"."co_8" = "qu_8"."co_8") AS "qu_1"' def test_aggregate_query13(): class PredicateAppender(SqlPredicateAppender): def append(self, introspection: SqlNodeIntrospection, table: SqlTable): if table.name == 'Tenant': column = SqlColumn(table, SqlIdentifier("name")) else: column = SqlColumn(table, SqlIdentifier("tenant")) rhs = SqlLiteral(StringLiteral('tenant-98')) SqlComparison(column, O_EQ, rhs).inspect(introspection) sql_rewriter = SqlRewriter(recruitment_schema, sql_predicate_appender=PredicateAppender()) parse_flatql('SELECT COUNT(Tenant.name) AS "租户(数量)", COUNT(Channel.name) AS "频道数量" FROM recruitment').rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "qu_0"."co_5" AS "租户(数量)", "qu_1"."co_2" AS "频道数量" FROM (SELECT SUM("qu_2"."co_1") AS "co_2" FROM (SELECT "ta_0"."openId" AS "co_0" FROM "v_channels" AS "ta_0" INNER JOIN "v_tenant" AS "ta_1" ON "ta_0"."tenant" = "ta_1"."name" WHERE "ta_0"."tenant" = \'tenant-98\' AND "ta_1"."name" = \'tenant-98\' GROUP BY "co_0") AS "qu_3" LEFT JOIN (SELECT "ta_0"."openId" AS "co_0", COUNT("ta_0"."name") AS "co_1" FROM "v_channels" AS "ta_0" WHERE "ta_0"."tenant" = \'tenant-98\' GROUP BY "co_0") AS "qu_2" ON "qu_3"."co_0" = "qu_2"."co_0") AS "qu_1" CROSS JOIN (SELECT SUM("qu_4"."co_4") AS "co_5" FROM (SELECT "ta_1"."name" AS "co_3" FROM "v_channels" AS "ta_0" INNER JOIN "v_tenant" AS "ta_1" ON "ta_0"."tenant" = "ta_1"."name" WHERE "ta_0"."tenant" = \'tenant-98\' AND 
"ta_1"."name" = \'tenant-98\' GROUP BY "co_3") AS "qu_5" LEFT JOIN (SELECT "ta_1"."name" AS "co_3", COUNT("ta_1"."name") AS "co_4" FROM "v_tenant" AS "ta_1" WHERE "ta_1"."name" = \'tenant-98\' GROUP BY "co_3") AS "qu_4" ON "qu_5"."co_3" = "qu_4"."co_3") AS "qu_0"' def test_aggregate_query14(): class PredicateAppender(SqlPredicateAppender): def append(self, introspection: SqlNodeIntrospection, table: SqlTable): if table.name == 'Tenant': column = SqlColumn(table, SqlIdentifier("name")) else: column = SqlColumn(table, SqlIdentifier("tenant")) rhs = SqlLiteral(StringLiteral('tenant-98')) SqlComparison(column, O_EQ, rhs).inspect(introspection) sql_rewriter = SqlRewriter(recruitment_schema, sql_predicate_appender=PredicateAppender()) parse_flatql('SELECT Tenant.name AS "租户", COUNT(Job.name) AS "租户(数量)", COUNT(Channel.name) AS "频道数量" FROM recruitment').rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "qu_0"."co_1" AS "租户", "qu_0"."co_6" AS "租户(数量)", "qu_1"."co_3" AS "频道数量" FROM (SELECT "qu_2"."co_1" AS "co_1", SUM("qu_3"."co_2") AS "co_3" FROM (SELECT "ta_0"."openId" AS "co_0", "ta_1"."name" AS "co_1" FROM "v_channels" AS "ta_0" INNER JOIN "v_projects" AS "ta_2" ON "ta_0"."openId" = "ta_2"."channelOpenId" INNER JOIN "v_job" AS "ta_3" ON "ta_2"."payloadType" = \'Job\' AND "ta_2"."payloadOpenId" = "ta_3"."openId" INNER JOIN "v_tenant" AS "ta_1" ON "ta_0"."tenant" = "ta_1"."name" WHERE "ta_0"."tenant" = \'tenant-98\' AND "ta_3"."tenant" = \'tenant-98\' AND "ta_2"."tenant" = \'tenant-98\' AND "ta_1"."name" = \'tenant-98\' GROUP BY "co_0", "co_1") AS "qu_2" LEFT JOIN (SELECT "ta_0"."openId" AS "co_0", COUNT("ta_0"."name") AS "co_2" FROM "v_channels" AS "ta_0" WHERE "ta_0"."tenant" = \'tenant-98\' GROUP BY "co_0") AS "qu_3" ON "qu_2"."co_0" = "qu_3"."co_0" GROUP BY "co_1") AS "qu_1" INNER JOIN (SELECT "qu_4"."co_1" AS "co_1", SUM("qu_5"."co_5") AS "co_6" FROM (SELECT "ta_3"."openId" AS "co_4", "ta_1"."name" AS "co_1" FROM "v_channels" AS "ta_0" INNER JOIN 
"v_projects" AS "ta_2" ON "ta_0"."openId" = "ta_2"."channelOpenId" INNER JOIN "v_job" AS "ta_3" ON "ta_2"."payloadType" = \'Job\' AND "ta_2"."payloadOpenId" = "ta_3"."openId" INNER JOIN "v_tenant" AS "ta_1" ON "ta_0"."tenant" = "ta_1"."name" WHERE "ta_0"."tenant" = \'tenant-98\' AND "ta_3"."tenant" = \'tenant-98\' AND "ta_2"."tenant" = \'tenant-98\' AND "ta_1"."name" = \'tenant-98\' GROUP BY "co_4", "co_1") AS "qu_4" LEFT JOIN (SELECT "ta_3"."openId" AS "co_4", COUNT("ta_3"."jobName") AS "co_5" FROM "v_job" AS "ta_3" WHERE "ta_3"."tenant" = \'tenant-98\' GROUP BY "co_4") AS "qu_5" ON "qu_4"."co_4" = "qu_5"."co_4" GROUP BY "co_1") AS "qu_0" ON "qu_1"."co_1" = "qu_0"."co_1"' def test_aggregate_query15(): sql_rewriter = SqlRewriter(recruitment_schema) parse_flatql('SELECT COUNT(ResumeHumaninfo.name) AS "简历(数量)"' ', COUNT(Channel.name) AS "频道数量", AVG(ResumeHumaninfo.age) AS "平均年龄" FROM recruitment WHERE ResumeHumaninfo.age > 0').rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "qu_0"."co_6" AS "简历(数量)", "qu_1"."co_2" AS "频道数量", "qu_0"."co_7" AS "平均年龄" FROM (SELECT SUM("qu_2"."co_1") AS "co_2" FROM (SELECT "ta_0"."openId" AS "co_0" FROM "v_channels" AS "ta_0" INNER JOIN "v_projects" AS "ta_1" ON "ta_0"."openId" = "ta_1"."channelOpenId" INNER JOIN "v_flow" AS "ta_2" ON "ta_1"."openId" = "ta_2"."circuitForeignId" INNER JOIN "v_resume" AS "ta_3" ON "ta_3"."openId" = "ta_2"."beanSourceId" WHERE "ta_3"."ageNormalized" > 0 GROUP BY "co_0") AS "qu_3" LEFT JOIN (SELECT "ta_0"."openId" AS "co_0", COUNT("ta_0"."name") AS "co_1" FROM "v_channels" AS "ta_0" GROUP BY "co_0") AS "qu_2" ON "qu_3"."co_0" = "qu_2"."co_0") AS "qu_1" CROSS JOIN (SELECT SUM("qu_4"."co_4") AS "co_6", AVG("qu_4"."co_5") AS "co_7" FROM (SELECT "ta_3"."openId" AS "co_3" FROM "v_resume" AS "ta_3" WHERE "ta_3"."ageNormalized" > 0 GROUP BY "co_3") AS "qu_5" LEFT JOIN (SELECT "ta_3"."openId" AS "co_3", COUNT("ta_3"."name") AS "co_4", AVG("ta_3"."ageNormalized") AS "co_5" FROM "v_resume" AS "ta_3" 
WHERE "ta_3"."ageNormalized" > 0 GROUP BY "co_3") AS "qu_4" ON "qu_5"."co_3" = "qu_4"."co_3") AS "qu_0"' def test_aggregate_query16(): class PredicateAppender(SqlPredicateAppender): def append(self, introspection: SqlNodeIntrospection, table: SqlTable): if table.name == 'Tenant': column = SqlColumn(table, SqlIdentifier("name")) else: column = SqlColumn(table, SqlIdentifier("tenant")) rhs = SqlLiteral(StringLiteral('tenant-98')) SqlComparison(column, O_EQ, rhs).inspect(introspection) sql_rewriter = SqlRewriter(recruitment_schema, sql_predicate_appender=PredicateAppender()) parse_flatql('SELECT COUNT(ResumeHumaninfo.name) AS "简历(数量)" FROM recruitment ' 'WHERE Channel.openId = \'tip-c01616f4-2bfa-49ba-bb17-799a6b66a631\' AND Project.openId = \'tip-1562b097-b29a-4a1c-bd7e-b7ba162329d8\'').rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "qu_0"."co_2" AS "简历(数量)" FROM (SELECT SUM("qu_1"."co_1") AS "co_2" FROM (SELECT "ta_0"."openId" AS "co_0" FROM "v_channels" AS "ta_1" INNER JOIN "v_projects" AS "ta_2" ON "ta_1"."openId" = "ta_2"."channelOpenId" INNER JOIN "v_flow" AS "ta_3" ON "ta_2"."openId" = "ta_3"."circuitForeignId" INNER JOIN "v_resume" AS "ta_0" ON "ta_0"."openId" = "ta_3"."beanSourceId" WHERE "ta_1"."openId" = \'tip-c01616f4-2bfa-49ba-bb17-799a6b66a631\' AND "ta_2"."openId" = \'tip-1562b097-b29a-4a1c-bd7e-b7ba162329d8\' AND "ta_1"."tenant" = \'tenant-98\' AND "ta_3"."tenant" = \'tenant-98\' AND "ta_2"."tenant" = \'tenant-98\' AND "ta_0"."tenant" = \'tenant-98\' GROUP BY "co_0") AS "qu_2" LEFT JOIN (SELECT "ta_0"."openId" AS "co_0", COUNT("ta_0"."name") AS "co_1" FROM "v_resume" AS "ta_0" WHERE "ta_0"."tenant" = \'tenant-98\' GROUP BY "co_0") AS "qu_1" ON "qu_2"."co_0" = "qu_1"."co_0") AS "qu_0"' def test_aggregate_query17(): sql_rewriter = SqlRewriter(recruitment_schema) parse_flatql('SELECT AVG(ResumeHumaninfo.age) AS "平均年龄", (COUNT(ResumeHumaninfo.name) + 100) / AVG(ResumeHumaninfo.age) AS "简历(数量)"' ', COUNT(Channel.name) AS "频道数量" FROM 
recruitment WHERE ResumeHumaninfo.age > 0').rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "qu_0"."co_6" AS "平均年龄", ("qu_0"."co_7" + 100) / "qu_0"."co_6" AS "简历(数量)", "qu_1"."co_2" AS "频道数量" FROM (SELECT SUM("qu_2"."co_1") AS "co_2" FROM (SELECT "ta_0"."openId" AS "co_0" FROM "v_channels" AS "ta_0" INNER JOIN "v_projects" AS "ta_1" ON "ta_0"."openId" = "ta_1"."channelOpenId" INNER JOIN "v_flow" AS "ta_2" ON "ta_1"."openId" = "ta_2"."circuitForeignId" INNER JOIN "v_resume" AS "ta_3" ON "ta_3"."openId" = "ta_2"."beanSourceId" WHERE "ta_3"."ageNormalized" > 0 GROUP BY "co_0") AS "qu_3" LEFT JOIN (SELECT "ta_0"."openId" AS "co_0", COUNT("ta_0"."name") AS "co_1" FROM "v_channels" AS "ta_0" GROUP BY "co_0") AS "qu_2" ON "qu_3"."co_0" = "qu_2"."co_0") AS "qu_1" CROSS JOIN (SELECT AVG("qu_4"."co_4") AS "co_6", SUM("qu_4"."co_5") AS "co_7" FROM (SELECT "ta_3"."openId" AS "co_3" FROM "v_resume" AS "ta_3" WHERE "ta_3"."ageNormalized" > 0 GROUP BY "co_3") AS "qu_5" LEFT JOIN (SELECT "ta_3"."openId" AS "co_3", AVG("ta_3"."ageNormalized") AS "co_4", COUNT("ta_3"."name") AS "co_5" FROM "v_resume" AS "ta_3" WHERE "ta_3"."ageNormalized" > 0 GROUP BY "co_3") AS "qu_4" ON "qu_5"."co_3" = "qu_4"."co_3") AS "qu_0"' def test_aggregate_query18(): sql_rewriter = SqlRewriter(recruitment_schema) parse_flatql('SELECT COUNT(CASE WHEN ResumeHumaninfo.age IS NOT NULL THEN 1 ELSE NULL END) FROM recruitment').rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT COUNT(CASE WHEN "ta_0"."ageNormalized" IS NOT NULL THEN 1 ELSE NULL END) FROM "v_resume" AS "ta_0"' def test_aggregate_query19(): sql_rewriter = SqlRewriter(recruitment_schema) parse_flatql('SELECT ResumeHumaninfo.age + 10 AS "年龄", COUNT(CASE WHEN Channel.name IS NOT NULL THEN 1 ELSE NULL END) AS "频道数量" ' 'FROM recruitment').rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "qu_0"."co_1" AS "年龄", "qu_0"."co_3" AS "频道数量" FROM (SELECT "qu_1"."co_1" AS "co_1", SUM("qu_2"."co_2") AS "co_3" FROM (SELECT 
"ta_0"."openId" AS "co_0", "ta_1"."ageNormalized" + 10 AS "co_1" FROM "v_channels" AS "ta_0" INNER JOIN "v_projects" AS "ta_2" ON "ta_0"."openId" = "ta_2"."channelOpenId" INNER JOIN "v_flow" AS "ta_3" ON "ta_2"."openId" = "ta_3"."circuitForeignId" INNER JOIN "v_resume" AS "ta_1" ON "ta_1"."openId" = "ta_3"."beanSourceId" GROUP BY "co_0", "co_1") AS "qu_1" LEFT JOIN (SELECT "ta_0"."openId" AS "co_0", COUNT(CASE WHEN "ta_0"."name" IS NOT NULL THEN 1 ELSE NULL END) AS "co_2" FROM "v_channels" AS "ta_0" GROUP BY "co_0") AS "qu_2" ON "qu_1"."co_0" = "qu_2"."co_0" GROUP BY "co_1") AS "qu_0"' def test_aggregate_query20(): sql_rewriter = SqlRewriter(recruitment_schema) parse_flatql('SELECT AVG(ResumeHumaninfo.age) + 10 AS "年龄", COUNT(CASE WHEN Channel.name IS NOT NULL THEN 1 ELSE NULL END) AS "频道数量" ' 'FROM recruitment').rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "qu_0"."co_3" + 10 AS "年龄", "qu_1"."co_2" AS "频道数量" FROM (SELECT SUM("qu_2"."co_1") AS "co_2" FROM (SELECT "ta_0"."openId" AS "co_0" FROM "v_channels" AS "ta_0" GROUP BY "co_0") AS "qu_3" LEFT JOIN (SELECT "ta_0"."openId" AS "co_0", COUNT(CASE WHEN "ta_0"."name" IS NOT NULL THEN 1 ELSE NULL END) AS "co_1" FROM "v_channels" AS "ta_0" GROUP BY "co_0") AS "qu_2" ON "qu_3"."co_0" = "qu_2"."co_0") AS "qu_1" CROSS JOIN (SELECT AVG("ta_1"."ageNormalized") AS "co_3" FROM "v_resume" AS "ta_1") AS "qu_0"' def test_aggregate_query21(): sql_rewriter = SqlRewriter(recruitment_schema) parse_flatql('SELECT (AVG(ResumeHumaninfo.age) + 10) AS "年龄", COUNT(CASE WHEN Channel.name IS NOT NULL THEN 1 ELSE NULL END) AS "频道数量" ' 'FROM recruitment').rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT ("qu_0"."co_3" + 10) AS "年龄", "qu_1"."co_2" AS "频道数量" FROM (SELECT SUM("qu_2"."co_1") AS "co_2" FROM (SELECT "ta_0"."openId" AS "co_0" FROM "v_channels" AS "ta_0" GROUP BY "co_0") AS "qu_3" LEFT JOIN (SELECT "ta_0"."openId" AS "co_0", COUNT(CASE WHEN "ta_0"."name" IS NOT NULL THEN 1 ELSE NULL END) AS "co_1" FROM 
"v_channels" AS "ta_0" GROUP BY "co_0") AS "qu_2" ON "qu_3"."co_0" = "qu_2"."co_0") AS "qu_1" CROSS JOIN (SELECT AVG("ta_1"."ageNormalized") AS "co_3" FROM "v_resume" AS "ta_1") AS "qu_0"' def test_aggregate_query22(): sql_rewriter = SqlRewriter(recruitment_schema) parse_flatql('SELECT (AVG(ResumeHumaninfo.age) + 10) * 100 - 10 AS "年龄", COUNT(CASE WHEN Channel.name IS NOT NULL THEN 1 ELSE NULL END) AS "频道数量" ' 'FROM recruitment').rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT ("qu_0"."co_3" + 10) * 100 - 10 AS "年龄", "qu_1"."co_2" AS "频道数量" FROM (SELECT SUM("qu_2"."co_1") AS "co_2" FROM (SELECT "ta_0"."openId" AS "co_0" FROM "v_channels" AS "ta_0" GROUP BY "co_0") AS "qu_3" LEFT JOIN (SELECT "ta_0"."openId" AS "co_0", COUNT(CASE WHEN "ta_0"."name" IS NOT NULL THEN 1 ELSE NULL END) AS "co_1" FROM "v_channels" AS "ta_0" GROUP BY "co_0") AS "qu_2" ON "qu_3"."co_0" = "qu_2"."co_0") AS "qu_1" CROSS JOIN (SELECT AVG("ta_1"."ageNormalized") AS "co_3" FROM "v_resume" AS "ta_1") AS "qu_0"' def test_aggregate_query23(): sql_rewriter = SqlRewriter(recruitment_schema) parse_flatql('SELECT CASE WHEN ResumeHumaninfo.age BETWEEN 0 AND 30 THEN \'青年\' ELSE \'壮年\' END AS "年龄段", ' 'COUNT(CASE WHEN Channel.name IS NOT NULL THEN 1 ELSE NULL END) AS "频道数量" ' 'FROM recruitment').rewrite(sql_rewriter) assert sql_rewriter.to_sql() == 'SELECT "qu_0"."co_1" AS "年龄段", "qu_0"."co_3" AS "频道数量" FROM (SELECT "qu_1"."co_1" AS "co_1", SUM("qu_2"."co_2") AS "co_3" FROM (SELECT "ta_0"."openId" AS "co_0", CASE WHEN "ta_1"."ageNormalized" BETWEEN 0 AND 30 THEN \'青年\' ELSE \'壮年\' END AS "co_1" FROM "v_channels" AS "ta_0" INNER JOIN "v_projects" AS "ta_2" ON "ta_0"."openId" = "ta_2"."channelOpenId" INNER JOIN "v_flow" AS "ta_3" ON "ta_2"."openId" = "ta_3"."circuitForeignId" INNER JOIN "v_resume" AS "ta_1" ON "ta_1"."openId" = "ta_3"."beanSourceId" GROUP BY "co_0", "co_1") AS "qu_1" LEFT JOIN (SELECT "ta_0"."openId" AS "co_0", COUNT(CASE WHEN "ta_0"."name" IS NOT NULL THEN 1 ELSE NULL END) 
AS "co_2" FROM "v_channels" AS "ta_0" GROUP BY "co_0") AS "qu_2" ON "qu_1"."co_0" = "qu_2"."co_0" GROUP BY "co_1") AS "qu_0"'
147.706349
2,812
0.658777
6,896
37,222
3.291183
0.024797
0.035249
0.034896
0.014804
0.94488
0.935451
0.925626
0.909059
0.888571
0.862883
0
0.055603
0.146231
37,222
251
2,813
148.294821
0.65858
0
0
0.42623
0
0.42623
0.766831
0.08006
0
0
0
0
0.163934
1
0.174863
false
0
0.043716
0
0.240437
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
1
1
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
10
7f46094ac8040788fef022a89271c749812fee29
87
py
Python
fastpivot/__init__.py
SethEBaldwin/fastpivot
950ca50105346180cb4c42aacdd9418473860aaf
[ "MIT" ]
null
null
null
fastpivot/__init__.py
SethEBaldwin/fastpivot
950ca50105346180cb4c42aacdd9418473860aaf
[ "MIT" ]
null
null
null
fastpivot/__init__.py
SethEBaldwin/fastpivot
950ca50105346180cb4c42aacdd9418473860aaf
[ "MIT" ]
null
null
null
from fastpivot.pivot import pivot_table from fastpivot.pivot_sparse import pivot_sparse
43.5
47
0.896552
13
87
5.769231
0.461538
0.346667
0.48
0
0
0
0
0
0
0
0
0
0.08046
87
2
47
43.5
0.9375
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
7f5a5ee2bf8d7bc082cd055c16abcfe62b650843
49
py
Python
yolo/net_ori/__init__.py
cxz1418/yolo_resnet
d53e4d178bd7984ee8dd545a7e6c98e81641a4ee
[ "MIT" ]
null
null
null
yolo/net_ori/__init__.py
cxz1418/yolo_resnet
d53e4d178bd7984ee8dd545a7e6c98e81641a4ee
[ "MIT" ]
null
null
null
yolo/net_ori/__init__.py
cxz1418/yolo_resnet
d53e4d178bd7984ee8dd545a7e6c98e81641a4ee
[ "MIT" ]
null
null
null
import net import yolo_net import yolo_tiny_net
16.333333
20
0.857143
9
49
4.333333
0.444444
0.461538
0.666667
0
0
0
0
0
0
0
0
0
0.142857
49
3
20
16.333333
0.928571
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
7f5ee45af325d7862f89b4085bd7f3d2bddafc5a
211
py
Python
api/waitlist/data/implants.py
testkil/tdf-waitlist
dfa54b4fd3bf2ff9acbd96a281e784e04a2187fa
[ "MIT" ]
null
null
null
api/waitlist/data/implants.py
testkil/tdf-waitlist
dfa54b4fd3bf2ff9acbd96a281e784e04a2187fa
[ "MIT" ]
null
null
null
api/waitlist/data/implants.py
testkil/tdf-waitlist
dfa54b4fd3bf2ff9acbd96a281e784e04a2187fa
[ "MIT" ]
null
null
null
from typing import List from . import esi def load_character_implants(character_id: int) -> List[int]: return list( esi.get("/v2/characters/%d/implants/" % character_id, character_id).json() )
23.444444
82
0.691943
29
211
4.862069
0.586207
0.234043
0.269504
0
0
0
0
0
0
0
0
0.00578
0.180095
211
8
83
26.375
0.809249
0
0
0
0
0
0.127962
0.127962
0
0
0
0
0
1
0.166667
false
0
0.333333
0.166667
0.666667
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
1
0
0
7
7fb8f78fb88cdf6e3ae5200e44100c1638003763
14,050
py
Python
test/test_patterns.py
DatHydroGuy/RayTracer
f870d3eb0408e52b92ee528bb6615187b762bede
[ "MIT" ]
null
null
null
test/test_patterns.py
DatHydroGuy/RayTracer
f870d3eb0408e52b92ee528bb6615187b762bede
[ "MIT" ]
null
null
null
test/test_patterns.py
DatHydroGuy/RayTracer
f870d3eb0408e52b92ee528bb6615187b762bede
[ "MIT" ]
null
null
null
import unittest from patterns import * from colours import Colour from primitives import * from math import pi class TestPattern(Pattern): def __init__(self, colour_a=WHITE, colour_b=BLACK): super().__init__(colour_a, colour_b) def pattern_at(self, point): return Colour(point.x, point.y, point.z) def pattern_at_shape(self, shape, point): return super(TestPattern, self).pattern_at_shape(shape, point) class PatternsTestCase(unittest.TestCase): def test_black_exists(self): # Arrange expected = Colour(0, 0, 0) # Act black = BLACK # Assert self.assertEqual(black, expected) def test_white_exists(self): # Arrange expected = Colour(1, 1, 1) # Act black = WHITE # Assert self.assertEqual(black, expected) def test_default_pattern_transformation(self): # Arrange expected = Matrix.identity(4) # Act c = TestPattern() # Assert self.assertEqual(c.transform, expected) def test_assigning_a_pattern_transformation(self): # Arrange c = TestPattern() expected = Matrix.translation(1, 2, 3) # Act c.transform = Matrix.translation(1, 2, 3) # Assert self.assertEqual(c.transform, expected) def test_a_pattern_with_an_object_transformation(self): # Arrange s = Sphere() s.transform = Matrix.scaling(2, 2, 2) p = TestPattern() expected = Colour(1, 1.5, 2) # Act c = p.pattern_at_shape(s, Point(2, 3, 4)) # Assert self.assertEqual(c, expected) def test_a_pattern_with_a_pattern_transformation(self): # Arrange s = Sphere() p = TestPattern() p.transform = Matrix.scaling(2, 2, 2) expected = Colour(1, 1.5, 2) # Act c = p.pattern_at_shape(s, Point(2, 3, 4)) # Assert self.assertEqual(c, expected) def test_a_pattern_with_an_object_and_a_pattern_transformation(self): # Arrange s = Sphere() s.transform = Matrix.scaling(2, 2, 2) p = TestPattern() p.transform = Matrix.translation(0.5, 1, 1.5) expected = Colour(0.75, 0.5, 0.25) # Act c = p.pattern_at_shape(s, Point(2.5, 3, 3.5)) # Assert self.assertEqual(c, expected) class StripedPatternTestCase(unittest.TestCase): def test_create_a_striped_pattern(self): # 
Arrange # Act pattern = StripePattern(WHITE, BLACK) # Assert self.assertEqual(pattern.a, WHITE) self.assertEqual(pattern.b, BLACK) def test_a_striped_pattern_is_constant_in_y(self): # Arrange # Act pattern = StripePattern(WHITE, BLACK) # Assert self.assertEqual(pattern.pattern_at(Point(0, 0, 0)), WHITE) self.assertEqual(pattern.pattern_at(Point(0, 1, 0)), WHITE) self.assertEqual(pattern.pattern_at(Point(0, 2, 0)), WHITE) def test_a_striped_pattern_is_constant_in_z(self): # Arrange # Act pattern = StripePattern(WHITE, BLACK) # Assert self.assertEqual(pattern.pattern_at(Point(0, 0, 0)), WHITE) self.assertEqual(pattern.pattern_at(Point(0, 0, 1)), WHITE) self.assertEqual(pattern.pattern_at(Point(0, 0, 2)), WHITE) def test_a_striped_pattern_alternates_in_x(self): # Arrange # Act pattern = StripePattern(WHITE, BLACK) # Assert self.assertEqual(pattern.pattern_at(Point(0, 0, 0)), WHITE) self.assertEqual(pattern.pattern_at(Point(0.9, 0, 0)), WHITE) self.assertEqual(pattern.pattern_at(Point(1, 0, 0)), BLACK) self.assertEqual(pattern.pattern_at(Point(-0.1, 0, 0)), BLACK) self.assertEqual(pattern.pattern_at(Point(-1, 0, 0)), BLACK) self.assertEqual(pattern.pattern_at(Point(-1.1, 0, 0)), WHITE) def test_stripes_with_an_object_transformation(self): # Arrange s = Sphere() s.set_transform(Matrix.scaling(2, 2, 2)) pattern = StripePattern(WHITE, BLACK) # Act c = pattern.pattern_at_shape(s, Point(1.5, 0, 0)) # Assert self.assertEqual(c, WHITE) def test_stripes_with_a_pattern_transformation(self): # Arrange s = Sphere() pattern = StripePattern(WHITE, BLACK) pattern.transform = Matrix.scaling(2, 2, 2) # Act c = pattern.pattern_at_shape(s, Point(1.5, 0, 0)) # Assert self.assertEqual(c, WHITE) def test_stripes_with_both_an_object_and_a_pattern_transformation(self): # Arrange s = Sphere() s.set_transform(Matrix.scaling(2, 2, 2)) pattern = StripePattern(WHITE, BLACK) pattern.transform = Matrix.translation(0.5, 0, 0) # Act c = pattern.pattern_at_shape(s, Point(2.5, 0, 0)) # Assert 
self.assertEqual(c, WHITE) class GradientPatternTestCase(unittest.TestCase): def test_a_gradient_linearly_interpolates_between_colours(self): # Arrange # Act pattern = GradientPattern(WHITE, BLACK) # Assert self.assertEqual(pattern.pattern_at(Point(0, 0, 0)), WHITE) self.assertEqual(pattern.pattern_at(Point(0.25, 0, 0)), Colour(0.75, 0.75, 0.75)) self.assertEqual(pattern.pattern_at(Point(0.5, 0, 0)), Colour(0.5, 0.5, 0.5)) self.assertEqual(pattern.pattern_at(Point(0.75, 0, 0)), Colour(0.25, 0.25, 0.25)) self.assertEqual(pattern.pattern_at(Point(1, 0, 0)), BLACK) class DoubleGradientPatternTestCase(unittest.TestCase): def test_a_double_gradient_linearly_interpolates_between_colours_in_first_half(self): # Arrange # Act pattern = DoubleGradientPattern(WHITE, BLACK) # Assert self.assertEqual(pattern.pattern_at(Point(0, 0, 0)), WHITE) self.assertEqual(pattern.pattern_at(Point(0.125, 0, 0)), Colour(0.75, 0.75, 0.75)) self.assertEqual(pattern.pattern_at(Point(0.25, 0, 0)), Colour(0.5, 0.5, 0.5)) self.assertEqual(pattern.pattern_at(Point(0.375, 0, 0)), Colour(0.25, 0.25, 0.25)) self.assertEqual(pattern.pattern_at(Point(0.5, 0, 0)), BLACK) def test_a_double_gradient_reverse_linearly_interpolates_between_colours_in_second_half(self): # Arrange # Act pattern = DoubleGradientPattern(WHITE, BLACK) # Assert self.assertEqual(pattern.pattern_at(Point(1, 0, 0)), WHITE) self.assertEqual(pattern.pattern_at(Point(0.875, 0, 0)), Colour(0.75, 0.75, 0.75)) self.assertEqual(pattern.pattern_at(Point(0.75, 0, 0)), Colour(0.5, 0.5, 0.5)) self.assertEqual(pattern.pattern_at(Point(0.625, 0, 0)), Colour(0.25, 0.25, 0.25)) self.assertEqual(pattern.pattern_at(Point(0.5, 0, 0)), BLACK) class RingPatternTestCase(unittest.TestCase): def test_a_ring_pattern_should_extend_in_both_x_and_z(self): # Arrange # Act pattern = RingPattern(WHITE, BLACK) # Assert self.assertEqual(pattern.pattern_at(Point(0, 0, 0)), WHITE) self.assertEqual(pattern.pattern_at(Point(1, 0, 0)), BLACK) 
self.assertEqual(pattern.pattern_at(Point(0, 0, 1)), BLACK) self.assertEqual(pattern.pattern_at(Point(0.708, 0, 0.708)), BLACK) class GradientRingPatternTestCase(unittest.TestCase): def test_a_gradient_ring_linearly_interpolates_in_both_x_and_z_directions(self): # Arrange # Act pattern = GradientRingPattern(WHITE, BLACK) # Assert self.assertEqual(pattern.pattern_at(Point(0, 0, 0)), WHITE) self.assertEqual(pattern.pattern_at(Point(0.25, 0, 0)), Colour(0.75, 0.75, 0.75)) self.assertEqual(pattern.pattern_at(Point(0, 0, 0.5)), Colour(0.5, 0.5, 0.5)) self.assertEqual(pattern.pattern_at(Point(0, 0, -0.75)), Colour(0.25, 0.25, 0.25)) self.assertEqual(pattern.pattern_at(Point(-1, 0, 0)), BLACK) def test_a_gradient_ring_repeats_in_both_x_and_z_directions(self): # Arrange val = 1 / (2 * sqrt(2)) # Act pattern = GradientRingPattern(WHITE, BLACK) # Assert self.assertEqual(pattern.pattern_at(Point(0, 0, 0)), WHITE) self.assertEqual(pattern.pattern_at(Point(val, 0, -val)), Colour(0.5, 0.5, 0.5)) self.assertEqual(pattern.pattern_at(Point(1, 0, 0)), BLACK) self.assertEqual(pattern.pattern_at(Point(-2 * val, 0, 2 * val)), BLACK) self.assertEqual(pattern.pattern_at(Point(-3 * val, 0, -3 * val)), Colour(0.5, 0.5, 0.5)) self.assertEqual(pattern.pattern_at(Point(5 * val, 0, 5 * val)), Colour(0.5, 0.5, 0.5)) class DoubleGradientRingPatternTestCase(unittest.TestCase): def test_double_gradient_ring_linearly_interpolates_in_both_x_and_z_directions_in_first_half(self): # Arrange # Act pattern = DoubleGradientRingPattern(WHITE, BLACK) # Assert self.assertEqual(pattern.pattern_at(Point(0, 0, 0)), WHITE) self.assertEqual(pattern.pattern_at(Point(0.125, 0, 0)), Colour(0.75, 0.75, 0.75)) self.assertEqual(pattern.pattern_at(Point(0, 0, 0.25)), Colour(0.5, 0.5, 0.5)) self.assertEqual(pattern.pattern_at(Point(0, 0, -0.375)), Colour(0.25, 0.25, 0.25)) self.assertEqual(pattern.pattern_at(Point(-0.5, 0, 0)), BLACK) def 
test_double_gradient_ring_linearly_interpolates_in_both_x_and_z_directions_in_second_half(self): # Arrange # Act pattern = DoubleGradientRingPattern(WHITE, BLACK) # Assert self.assertEqual(pattern.pattern_at(Point(1, 0, 0)), WHITE) self.assertEqual(pattern.pattern_at(Point(0, 0, 0.875)), Colour(0.75, 0.75, 0.75)) self.assertEqual(pattern.pattern_at(Point(0, 0, -0.75)), Colour(0.5, 0.5, 0.5)) self.assertEqual(pattern.pattern_at(Point(-0.625, 0, 0)), Colour(0.25, 0.25, 0.25)) self.assertEqual(pattern.pattern_at(Point(0.5, 0, 0)), BLACK) def test_double_gradient_ring_repeats_in_both_x_and_z_directions(self): # Arrange val = 1 / (4 * sqrt(2)) # Act pattern = DoubleGradientRingPattern(WHITE, BLACK) # Assert self.assertEqual(pattern.pattern_at(Point(0, 0, 0)), WHITE) self.assertEqual(pattern.pattern_at(Point(val, 0, -val)), Colour(0.5, 0.5, 0.5)) self.assertEqual(pattern.pattern_at(Point(0.5, 0, 0)), BLACK) self.assertEqual(pattern.pattern_at(Point(-2 * val, 0, 2 * val)), BLACK) self.assertEqual(pattern.pattern_at(Point(-3 * val, 0, -3 * val)), Colour(0.5, 0.5, 0.5)) self.assertEqual(pattern.pattern_at(Point(0, 0, -1)), WHITE) self.assertEqual(pattern.pattern_at(Point(5 * val, 0, 5 * val)), Colour(0.5, 0.5, 0.5)) class CheckersPatternTestCase(unittest.TestCase): def test_checkers_should_repeat_in_x(self): # Arrange # Act pattern = CheckersPattern(WHITE, BLACK) # Assert self.assertEqual(pattern.pattern_at(Point(0, 0, 0)), WHITE) self.assertEqual(pattern.pattern_at(Point(0.99, 0, 0)), WHITE) self.assertEqual(pattern.pattern_at(Point(1.01, 0, 0)), BLACK) def test_checkers_should_repeat_in_y(self): # Arrange # Act pattern = CheckersPattern(WHITE, BLACK) # Assert self.assertEqual(pattern.pattern_at(Point(0, 0, 0)), WHITE) self.assertEqual(pattern.pattern_at(Point(0, 0.99, 0)), WHITE) self.assertEqual(pattern.pattern_at(Point(0, 1.01, 0)), BLACK) def test_checkers_should_repeat_in_z(self): # Arrange # Act pattern = CheckersPattern(WHITE, BLACK) # Assert 
self.assertEqual(pattern.pattern_at(Point(0, 0, 0)), WHITE) self.assertEqual(pattern.pattern_at(Point(0, 0, 0.99)), WHITE) self.assertEqual(pattern.pattern_at(Point(0, 0, 1.01)), BLACK) class BlendedPatternTestCase(unittest.TestCase): def setUp(self) -> None: self.ground = Plane() pattern1 = StripePattern(WHITE, BLACK) pattern2 = StripePattern(WHITE, BLACK) pattern2.transform = Matrix.rotation_y(pi / 2) ground_pattern = BlendedPattern(pattern1, pattern2) self.ground.material = Material() self.ground.material.pattern = ground_pattern def test_blended_stripes_at_right_angles_should_repeat_in_x(self): # Arrange GREY = Colour(0.5, 0.5, 0.5) # Act results1 = [self.ground.material.pattern.pattern_at_shape(self.ground, Point(x + 0.5, 0, 0.5)) for x in range(-5, 6)] results2 = [self.ground.material.pattern.pattern_at_shape(self.ground, Point(x + 0.5, 0, 1.5)) for x in range(-5, 6)] # Assert self.assertTrue(all([r == BLACK for i, r in enumerate(results1) if i == 0 % 2])) self.assertTrue(all([r == GREY for i, r in enumerate(results1) if i == 1 % 2])) self.assertTrue(all([r == GREY for i, r in enumerate(results2) if i == 0 % 2])) self.assertTrue(all([r == WHITE for i, r in enumerate(results2) if i == 1 % 2])) def test_blended_stripes_at_right_angles_should_repeat_in_z(self): # Arrange GREY = Colour(0.5, 0.5, 0.5) # Act results1 = [self.ground.material.pattern.pattern_at_shape(self.ground, Point(0.5, 0, x + 0.5)) for x in range(-5, 6)] results2 = [self.ground.material.pattern.pattern_at_shape(self.ground, Point(1.5, 0, x + 0.5)) for x in range(-5, 6)] # Assert self.assertTrue(all([r == WHITE for i, r in enumerate(results1) if i == 0 % 2])) self.assertTrue(all([r == GREY for i, r in enumerate(results1) if i == 1 % 2])) self.assertTrue(all([r == GREY for i, r in enumerate(results2) if i == 0 % 2])) self.assertTrue(all([r == BLACK for i, r in enumerate(results2) if i == 1 % 2])) if __name__ == '__main__': unittest.main()
35.301508
104
0.629893
1,948
14,050
4.377823
0.069815
0.0197
0.140713
0.231238
0.846623
0.793856
0.773804
0.752228
0.701571
0.690197
0
0.056798
0.239359
14,050
397
105
35.390428
0.741181
0.037794
0
0.446009
0
0
0.000596
0
0
0
0
0
0.413146
1
0.150235
false
0
0.023474
0.00939
0.230047
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
8
f689f807d137cb00d29cab3ac21e8a78eac92cc7
259
py
Python
chap5_the_matrix/chap5.py
lastone9182/CodingtheMatrix
b0d67e26940e59e51a7e2760b734c7de50490e1a
[ "MIT" ]
3
2018-01-11T07:48:06.000Z
2020-04-27T20:49:02.000Z
chap5_the_matrix/chap5.py
lastone9182/CodingtheMatrix
b0d67e26940e59e51a7e2760b734c7de50490e1a
[ "MIT" ]
null
null
null
chap5_the_matrix/chap5.py
lastone9182/CodingtheMatrix
b0d67e26940e59e51a7e2760b734c7de50490e1a
[ "MIT" ]
1
2021-01-26T07:25:48.000Z
2021-01-26T07:25:48.000Z
# [[0 for j in range(4)] for i in range(3)] R = {'a', 'b'} C = {'#','@','?'} { ((j+1)*10**i for j in range(4) for i in range(3) } M = Mat(({'a', 'b'}, {'#','@','?'}), {('a', '#'):1, ('a', '@'):2, ('a', '?'):3, ('b', '#'):10, ('b', '@'):20, ('b', '?'):30})
28.777778
125
0.305019
45
259
1.755556
0.422222
0.35443
0.151899
0.278481
0.607595
0.607595
0.607595
0.607595
0.607595
0.607595
0
0.080569
0.185328
259
8
126
32.375
0.293839
0
0
0
0
0
0.102326
0
0
0
0
0
0
0
null
null
0
0
null
null
0
0
0
1
null
1
0
1
0
0
0
0
0
1
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
7
f6ae5b80a2e1f5e69ef871e05a3bf2e1702673e2
383,174
py
Python
source/codegen/metadata/nidaqmx/functions.py
zhindes/grpc-device
616aa913963098b12d276693895b7eb946f82df4
[ "MIT" ]
null
null
null
source/codegen/metadata/nidaqmx/functions.py
zhindes/grpc-device
616aa913963098b12d276693895b7eb946f82df4
[ "MIT" ]
null
null
null
source/codegen/metadata/nidaqmx/functions.py
zhindes/grpc-device
616aa913963098b12d276693895b7eb946f82df4
[ "MIT" ]
null
null
null
functions = { 'AddCDAQSyncConnection': { 'parameters': [ { 'direction': 'in', 'name': 'portList', 'type': 'const char[]' } ], 'returns': 'int32' }, 'AddGlobalChansToTask': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'channelNames', 'type': 'const char[]' } ], 'returns': 'int32' }, 'AddNetworkDevice': { 'parameters': [ { 'direction': 'in', 'name': 'ipAddress', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'attemptReservation', 'type': 'bool32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'out', 'name': 'deviceNameOut', 'size': { 'mechanism': 'ivi-dance', 'value': 'deviceNameOutBufferSize' }, 'type': 'char[]' }, { 'direction': 'in', 'name': 'deviceNameOutBufferSize', 'type': 'uInt32' } ], 'returns': 'int32' }, 'AreConfiguredCDAQSyncPortsDisconnected': { 'parameters': [ { 'direction': 'in', 'name': 'chassisDevicesPorts', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'out', 'name': 'disconnectedPortsExist', 'type': 'bool32' } ], 'returns': 'int32' }, 'AutoConfigureCDAQSyncConnections': { 'parameters': [ { 'direction': 'in', 'name': 'chassisDevicesPorts', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' } ], 'returns': 'int32' }, 'CalculateReversePolyCoeff': { 'parameters': [ { 'direction': 'in', 'name': 'forwardCoeffs', 'size': { 'mechanism': 'len', 'value': 'numForwardCoeffsIn' }, 'type': 'const float64[]' }, { 'direction': 'in', 'name': 'numForwardCoeffsIn', 'type': 'uInt32' }, { 'direction': 'in', 'name': 'minValX', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxValX', 'type': 'float64' }, { 'direction': 'in', 'name': 'numPointsToCompute', 'type': 'int32' }, { 'direction': 'in', 'name': 'reversePolyOrder', 'type': 'int32' }, { 'direction': 'out', 'name': 'reverseCoeffs', 'size': { 'mechanism': 
'custom-code', 'value': '(reversePolyOrder < 0) ? numForwardCoeffsIn : reversePolyOrder + 1' }, 'type': 'float64[]' } ], 'returns': 'int32' }, 'CfgAnlgEdgeRefTrig': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'triggerSource', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'Slope1', 'name': 'triggerSlope', 'type': 'int32' }, { 'direction': 'in', 'name': 'triggerLevel', 'type': 'float64' }, { 'direction': 'in', 'name': 'pretriggerSamples', 'type': 'uInt32' } ], 'returns': 'int32' }, 'CfgAnlgEdgeStartTrig': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'triggerSource', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'Slope1', 'name': 'triggerSlope', 'type': 'int32' }, { 'direction': 'in', 'name': 'triggerLevel', 'type': 'float64' } ], 'returns': 'int32' }, 'CfgAnlgMultiEdgeRefTrig': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'triggerSources', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'triggerSlopeArray', 'size': { 'mechanism': 'len', 'value': 'arraySize' }, 'type': 'const int32[]' }, { 'direction': 'in', 'name': 'triggerLevelArray', 'size': { 'mechanism': 'len', 'value': 'arraySize' }, 'type': 'const float64[]' }, { 'direction': 'in', 'name': 'pretriggerSamples', 'type': 'uInt32' }, { 'direction': 'in', 'name': 'arraySize', 'type': 'uInt32' } ], 'returns': 'int32' }, 'CfgAnlgMultiEdgeStartTrig': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'triggerSources', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'triggerSlopeArray', 'size': { 'mechanism': 'len', 'value': 'arraySize' }, 'type': 'const int32[]' }, { 'direction': 'in', 'name': 'triggerLevelArray', 'size': { 'mechanism': 'len', 'value': 'arraySize' }, 'type': 'const float64[]' }, { 'direction': 'in', 'name': 'arraySize', 'type': 
'uInt32' } ], 'returns': 'int32' }, 'CfgAnlgWindowRefTrig': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'triggerSource', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'WindowTriggerCondition1', 'name': 'triggerWhen', 'type': 'int32' }, { 'direction': 'in', 'name': 'windowTop', 'type': 'float64' }, { 'direction': 'in', 'name': 'windowBottom', 'type': 'float64' }, { 'direction': 'in', 'name': 'pretriggerSamples', 'type': 'uInt32' } ], 'returns': 'int32' }, 'CfgAnlgWindowStartTrig': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'triggerSource', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'WindowTriggerCondition1', 'name': 'triggerWhen', 'type': 'int32' }, { 'direction': 'in', 'name': 'windowTop', 'type': 'float64' }, { 'direction': 'in', 'name': 'windowBottom', 'type': 'float64' } ], 'returns': 'int32' }, 'CfgBurstHandshakingTimingExportClock': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'enum': 'AcquisitionType', 'name': 'sampleMode', 'type': 'int32' }, { 'direction': 'in', 'name': 'sampsPerChan', 'type': 'uInt64' }, { 'direction': 'in', 'name': 'sampleClkRate', 'type': 'float64' }, { 'direction': 'in', 'name': 'sampleClkOutpTerm', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'Polarity2', 'name': 'sampleClkPulsePolarity', 'type': 'int32' }, { 'direction': 'in', 'enum': 'Level1', 'name': 'pauseWhen', 'type': 'int32' }, { 'direction': 'in', 'enum': 'Polarity2', 'name': 'readyEventActiveLevel', 'type': 'int32' } ], 'returns': 'int32' }, 'CfgBurstHandshakingTimingImportClock': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'enum': 'AcquisitionType', 'name': 'sampleMode', 'type': 'int32' }, { 'direction': 'in', 'name': 'sampsPerChan', 'type': 'uInt64' }, { 'direction': 'in', 'name': 'sampleClkRate', 'type': 
'float64' }, { 'direction': 'in', 'name': 'sampleClkSrc', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'Edge1', 'name': 'sampleClkActiveEdge', 'type': 'int32' }, { 'direction': 'in', 'enum': 'Level1', 'name': 'pauseWhen', 'type': 'int32' }, { 'direction': 'in', 'enum': 'Polarity2', 'name': 'readyEventActiveLevel', 'type': 'int32' } ], 'returns': 'int32' }, 'CfgChangeDetectionTiming': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'risingEdgeChan', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'fallingEdgeChan', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'AcquisitionType', 'name': 'sampleMode', 'type': 'int32' }, { 'direction': 'in', 'name': 'sampsPerChan', 'type': 'uInt64' } ], 'returns': 'int32' }, 'CfgDigEdgeRefTrig': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'triggerSource', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'Edge1', 'name': 'triggerEdge', 'type': 'int32' }, { 'direction': 'in', 'name': 'pretriggerSamples', 'type': 'uInt32' } ], 'returns': 'int32' }, 'CfgDigEdgeStartTrig': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'triggerSource', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'Edge1', 'name': 'triggerEdge', 'type': 'int32' } ], 'returns': 'int32' }, 'CfgDigPatternRefTrig': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'triggerSource', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'triggerPattern', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'DigitalPatternCondition1', 'name': 'triggerWhen', 'type': 'int32' }, { 'direction': 'in', 'name': 'pretriggerSamples', 'type': 'uInt32' } ], 'returns': 'int32' }, 'CfgDigPatternStartTrig': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 
'triggerSource', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'triggerPattern', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'DigitalPatternCondition1', 'name': 'triggerWhen', 'type': 'int32' } ], 'returns': 'int32' }, 'CfgHandshakingTiming': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'enum': 'AcquisitionType', 'name': 'sampleMode', 'type': 'int32' }, { 'direction': 'in', 'name': 'sampsPerChan', 'type': 'uInt64' } ], 'returns': 'int32' }, 'CfgImplicitTiming': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'enum': 'AcquisitionType', 'name': 'sampleMode', 'type': 'int32' }, { 'direction': 'in', 'name': 'sampsPerChan', 'type': 'uInt64' } ], 'returns': 'int32' }, 'CfgInputBuffer': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'uInt32' } ], 'returns': 'int32' }, 'CfgOutputBuffer': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'uInt32' } ], 'returns': 'int32' }, 'CfgPipelinedSampClkTiming': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'source', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'rate', 'type': 'float64' }, { 'direction': 'in', 'enum': 'Edge1', 'name': 'activeEdge', 'type': 'int32' }, { 'direction': 'in', 'enum': 'AcquisitionType', 'name': 'sampleMode', 'type': 'int32' }, { 'direction': 'in', 'name': 'sampsPerChan', 'type': 'uInt64' } ], 'returns': 'int32' }, 'CfgSampClkTiming': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'source', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'rate', 'type': 'float64' }, { 'direction': 'in', 'enum': 'Edge1', 'name': 'activeEdge', 'type': 'int32' }, { 'direction': 'in', 'enum': 
'AcquisitionType', 'name': 'sampleMode', 'type': 'int32' }, { 'direction': 'in', 'name': 'sampsPerChan', 'type': 'uInt64' } ], 'returns': 'int32' }, 'CfgTimeStartTrig': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'when', 'type': 'CVIAbsoluteTime' }, { 'direction': 'in', 'enum': 'Timescale2', 'name': 'timescale', 'type': 'int32' } ], 'returns': 'int32' }, 'CfgWatchdogAOExpirStates': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'channelNames', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'expirStateArray', 'size': { 'mechanism': 'len', 'value': 'arraySize' }, 'type': 'const float64[]' }, { 'direction': 'in', 'enum': 'WatchdogAOOutputType', 'name': 'outputTypeArray', 'size': { 'mechanism': 'len', 'value': 'arraySize' }, 'type': 'const int32[]' }, { 'direction': 'in', 'name': 'arraySize', 'type': 'uInt32' } ], 'returns': 'int32' }, 'CfgWatchdogCOExpirStates': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'channelNames', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'WatchdogCOExpirState', 'name': 'expirStateArray', 'size': { 'mechanism': 'len', 'value': 'arraySize' }, 'type': 'const int32[]' }, { 'direction': 'in', 'name': 'arraySize', 'type': 'uInt32' } ], 'returns': 'int32' }, 'CfgWatchdogDOExpirStates': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'channelNames', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'DigitalLineState', 'name': 'expirStateArray', 'size': { 'mechanism': 'len', 'value': 'arraySize' }, 'type': 'const int32[]' }, { 'direction': 'in', 'name': 'arraySize', 'type': 'uInt32' } ], 'returns': 'int32' }, 'ClearTEDS': { 'parameters': [ { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' } ], 'returns': 'int32' }, 'ClearTask': { 'parameters': [ { 'direction': 
'in', 'name': 'task', 'type': 'TaskHandle' } ], 'returns': 'int32' }, 'ConfigureLogging': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'filePath', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'LoggingMode', 'name': 'loggingMode', 'type': 'int32' }, { 'direction': 'in', 'name': 'groupName', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'LoggingOperation', 'name': 'operation', 'type': 'int32' } ], 'returns': 'int32' }, 'ConfigureTEDS': { 'parameters': [ { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'filePath', 'type': 'const char[]' } ], 'returns': 'int32' }, 'ConnectTerms': { 'parameters': [ { 'direction': 'in', 'name': 'sourceTerminal', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'destinationTerminal', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'InvertPolarity', 'name': 'signalModifiers', 'type': 'int32' } ], 'returns': 'int32' }, 'ControlWatchdogTask': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'enum': 'WatchdogControlAction', 'name': 'action', 'type': 'int32' } ], 'returns': 'int32' }, 'CreateAIAccel4WireDCVoltageChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'InputTermCfgWithDefault', 'name': 'terminalConfig', 'type': 'int32' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'AccelUnits2', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'name': 'sensitivity', 'type': 'float64' }, { 'direction': 'in', 'enum': 'AccelSensitivityUnits1', 'name': 'sensitivityUnits', 'type': 'int32' }, { 'direction': 'in', 'enum': 
'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'useExcitForScaling', 'type': 'bool32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAIAccelChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'InputTermCfgWithDefault', 'name': 'terminalConfig', 'type': 'int32' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'AccelUnits2', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'name': 'sensitivity', 'type': 'float64' }, { 'direction': 'in', 'enum': 'AccelSensitivityUnits1', 'name': 'sensitivityUnits', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'currentExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'currentExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAIAccelChargeChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'InputTermCfgWithDefault', 'name': 'terminalConfig', 'type': 'int32' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'AccelUnits2', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'name': 'sensitivity', 'type': 'float64' }, { 'direction': 'in', 'enum': 'AccelChargeSensitivityUnits', 'name': 
'sensitivityUnits', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAIBridgeChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'BridgeUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'BridgeConfiguration1', 'name': 'bridgeConfig', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'nominalBridgeResistance', 'type': 'float64' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAIChargeChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'InputTermCfgWithDefault', 'name': 'terminalConfig', 'type': 'int32' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'ChargeUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAICurrentChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'InputTermCfgWithDefault', 
'name': 'terminalConfig', 'type': 'int32' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'CurrentUnits2', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'CurrentShuntResistorLocationWithDefault', 'name': 'shuntResistorLoc', 'type': 'int32' }, { 'direction': 'in', 'name': 'extShuntResistorVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAICurrentRMSChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'InputTermCfgWithDefault', 'name': 'terminalConfig', 'type': 'int32' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'CurrentUnits2', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'CurrentShuntResistorLocationWithDefault', 'name': 'shuntResistorLoc', 'type': 'int32' }, { 'direction': 'in', 'name': 'extShuntResistorVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAIForceBridgePolynomialChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'ForceUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'BridgeConfiguration1', 'name': 'bridgeConfig', 'type': 'int32' }, { 'direction': 'in', 'enum': 
'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'nominalBridgeResistance', 'type': 'float64' }, { 'direction': 'in', 'name': 'forwardCoeffs', 'size': { 'mechanism': 'len', 'value': 'numForwardCoeffs' }, 'type': 'const float64[]' }, { 'direction': 'in', 'name': 'numForwardCoeffs', 'type': 'uInt32' }, { 'direction': 'in', 'name': 'reverseCoeffs', 'size': { 'mechanism': 'len', 'value': 'numReverseCoeffs' }, 'type': 'const float64[]' }, { 'direction': 'in', 'name': 'numReverseCoeffs', 'type': 'uInt32' }, { 'direction': 'in', 'enum': 'BridgeElectricalUnits', 'name': 'electricalUnits', 'type': 'int32' }, { 'direction': 'in', 'enum': 'BridgePhysicalUnits', 'name': 'physicalUnits', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAIForceBridgeTableChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'ForceUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'BridgeConfiguration1', 'name': 'bridgeConfig', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'nominalBridgeResistance', 'type': 'float64' }, { 'direction': 'in', 'name': 'electricalVals', 'size': { 'mechanism': 'len', 'value': 'numElectricalVals' }, 'type': 'const float64[]' }, { 'direction': 'in', 'name': 'numElectricalVals', 'type': 'uInt32' }, { 'direction': 'in', 'enum': 'BridgeElectricalUnits', 'name': 
'electricalUnits', 'type': 'int32' }, { 'direction': 'in', 'name': 'physicalVals', 'size': { 'mechanism': 'len', 'value': 'numPhysicalVals' }, 'type': 'const float64[]' }, { 'direction': 'in', 'name': 'numPhysicalVals', 'type': 'uInt32' }, { 'direction': 'in', 'enum': 'BridgePhysicalUnits', 'name': 'physicalUnits', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAIForceBridgeTwoPointLinChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'ForceUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'BridgeConfiguration1', 'name': 'bridgeConfig', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'nominalBridgeResistance', 'type': 'float64' }, { 'direction': 'in', 'name': 'firstElectricalVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'secondElectricalVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'BridgeElectricalUnits', 'name': 'electricalUnits', 'type': 'int32' }, { 'direction': 'in', 'name': 'firstPhysicalVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'secondPhysicalVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'BridgePhysicalUnits', 'name': 'physicalUnits', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAIForceIEPEChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' 
}, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'InputTermCfgWithDefault', 'name': 'terminalConfig', 'type': 'int32' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'ForceIEPEUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'name': 'sensitivity', 'type': 'float64' }, { 'direction': 'in', 'enum': 'ForceIEPESensorSensitivityUnits', 'name': 'sensitivityUnits', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'currentExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'currentExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAIFreqVoltageChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'FrequencyUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'name': 'thresholdLevel', 'type': 'float64' }, { 'direction': 'in', 'name': 'hysteresis', 'type': 'float64' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAIMicrophoneChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'InputTermCfgWithDefault', 'name': 'terminalConfig', 'type': 'int32' }, { 'direction': 'in', 'enum': 'SoundPressureUnits1', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'name': 
'micSensitivity', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxSndPressLevel', 'type': 'float64' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'currentExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'currentExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAIPosEddyCurrProxProbeChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'LengthUnits2', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'name': 'sensitivity', 'type': 'float64' }, { 'direction': 'in', 'enum': 'EddyCurrentProxProbeSensitivityUnits', 'name': 'sensitivityUnits', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAIPosLVDTChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'LengthUnits2', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'name': 'sensitivity', 'type': 'float64' }, { 'direction': 'in', 'enum': 'LVDTSensitivityUnits1', 'name': 'sensitivityUnits', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'voltageExcitFreq', 'type': 'float64' }, { 'direction': 
'in', 'enum': 'ACExcitWireMode', 'name': 'acExcitWireMode', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAIPosRVDTChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'AngleUnits1', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'name': 'sensitivity', 'type': 'float64' }, { 'direction': 'in', 'enum': 'RVDTSensitivityUnits1', 'name': 'sensitivityUnits', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'voltageExcitFreq', 'type': 'float64' }, { 'direction': 'in', 'enum': 'ACExcitWireMode', 'name': 'acExcitWireMode', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAIPressureBridgePolynomialChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'PressureUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'BridgeConfiguration1', 'name': 'bridgeConfig', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 
'nominalBridgeResistance', 'type': 'float64' }, { 'direction': 'in', 'name': 'forwardCoeffs', 'size': { 'mechanism': 'len', 'value': 'numForwardCoeffs' }, 'type': 'const float64[]' }, { 'direction': 'in', 'name': 'numForwardCoeffs', 'type': 'uInt32' }, { 'direction': 'in', 'name': 'reverseCoeffs', 'size': { 'mechanism': 'len', 'value': 'numReverseCoeffs' }, 'type': 'const float64[]' }, { 'direction': 'in', 'name': 'numReverseCoeffs', 'type': 'uInt32' }, { 'direction': 'in', 'enum': 'BridgeElectricalUnits', 'name': 'electricalUnits', 'type': 'int32' }, { 'direction': 'in', 'enum': 'BridgePhysicalUnits', 'name': 'physicalUnits', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAIPressureBridgeTableChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'PressureUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'BridgeConfiguration1', 'name': 'bridgeConfig', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'nominalBridgeResistance', 'type': 'float64' }, { 'direction': 'in', 'name': 'electricalVals', 'size': { 'mechanism': 'len', 'value': 'numElectricalVals' }, 'type': 'const float64[]' }, { 'direction': 'in', 'name': 'numElectricalVals', 'type': 'uInt32' }, { 'direction': 'in', 'enum': 'BridgeElectricalUnits', 'name': 'electricalUnits', 'type': 'int32' }, { 'direction': 'in', 'name': 'physicalVals', 'size': { 'mechanism': 'len', 'value': 'numPhysicalVals' }, 'type': 'const 
float64[]' }, { 'direction': 'in', 'name': 'numPhysicalVals', 'type': 'uInt32' }, { 'direction': 'in', 'enum': 'BridgePhysicalUnits', 'name': 'physicalUnits', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAIPressureBridgeTwoPointLinChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'PressureUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'BridgeConfiguration1', 'name': 'bridgeConfig', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'nominalBridgeResistance', 'type': 'float64' }, { 'direction': 'in', 'name': 'firstElectricalVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'secondElectricalVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'BridgeElectricalUnits', 'name': 'electricalUnits', 'type': 'int32' }, { 'direction': 'in', 'name': 'firstPhysicalVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'secondPhysicalVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'BridgePhysicalUnits', 'name': 'physicalUnits', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAIRTDChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 
'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'TemperatureUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'RTDType1', 'name': 'rtdType', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ResistanceConfiguration', 'name': 'resistanceConfig', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'currentExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'currentExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'r0', 'type': 'float64' } ], 'returns': 'int32' }, 'CreateAIResistanceChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'ResistanceUnits2', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ResistanceConfiguration', 'name': 'resistanceConfig', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'currentExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'currentExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAIRosetteStrainGageChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'StrainGageRosetteType', 'name': 'rosetteType', 'type': 'int32' }, { 'direction': 'in', 'name': 'gageOrientation', 'type': 'float64' }, { 'direction': 'in', 'name': 'rosetteMeasTypes', 'size': 
{ 'mechanism': 'len', 'value': 'numRosetteMeasTypes' }, 'type': 'const int32[]' }, { 'direction': 'in', 'name': 'numRosetteMeasTypes', 'type': 'uInt32' }, { 'direction': 'in', 'enum': 'StrainGageBridgeType1', 'name': 'strainConfig', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'gageFactor', 'type': 'float64' }, { 'direction': 'in', 'name': 'nominalGageResistance', 'type': 'float64' }, { 'direction': 'in', 'name': 'poissonRatio', 'type': 'float64' }, { 'direction': 'in', 'name': 'leadWireResistance', 'type': 'float64' } ], 'returns': 'int32' }, 'CreateAIStrainGageChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'StrainUnits1', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'StrainGageBridgeType1', 'name': 'strainConfig', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'gageFactor', 'type': 'float64' }, { 'direction': 'in', 'name': 'initialBridgeVoltage', 'type': 'float64' }, { 'direction': 'in', 'name': 'nominalGageResistance', 'type': 'float64' }, { 'direction': 'in', 'name': 'poissonRatio', 'type': 'float64' }, { 'direction': 'in', 'name': 'leadWireResistance', 'type': 'float64' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAITempBuiltInSensorChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 
'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'TemperatureUnits', 'name': 'units', 'type': 'int32' } ], 'returns': 'int32' }, 'CreateAIThrmcplChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'TemperatureUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ThermocoupleType1', 'name': 'thermocoupleType', 'type': 'int32' }, { 'direction': 'in', 'enum': 'CJCSource1', 'name': 'cjcSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'cjcVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'cjcChannel', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAIThrmstrChanIex': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'TemperatureUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ResistanceConfiguration', 'name': 'resistanceConfig', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'currentExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'currentExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'a', 'type': 'float64' }, { 'direction': 'in', 'name': 'b', 'type': 'float64' }, { 'direction': 'in', 'name': 'c', 'type': 'float64' } ], 'returns': 'int32' }, 
'CreateAIThrmstrChanVex': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'TemperatureUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ResistanceConfiguration', 'name': 'resistanceConfig', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'a', 'type': 'float64' }, { 'direction': 'in', 'name': 'b', 'type': 'float64' }, { 'direction': 'in', 'name': 'c', 'type': 'float64' }, { 'direction': 'in', 'name': 'r1', 'type': 'float64' } ], 'returns': 'int32' }, 'CreateAITorqueBridgePolynomialChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'TorqueUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'BridgeConfiguration1', 'name': 'bridgeConfig', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'nominalBridgeResistance', 'type': 'float64' }, { 'direction': 'in', 'name': 'forwardCoeffs', 'size': { 'mechanism': 'len', 'value': 'numForwardCoeffs' }, 'type': 'const float64[]' }, { 'direction': 'in', 'name': 'numForwardCoeffs', 'type': 'uInt32' }, { 
'direction': 'in', 'name': 'reverseCoeffs', 'size': { 'mechanism': 'len', 'value': 'numReverseCoeffs' }, 'type': 'const float64[]' }, { 'direction': 'in', 'name': 'numReverseCoeffs', 'type': 'uInt32' }, { 'direction': 'in', 'enum': 'BridgeElectricalUnits', 'name': 'electricalUnits', 'type': 'int32' }, { 'direction': 'in', 'enum': 'BridgePhysicalUnits', 'name': 'physicalUnits', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAITorqueBridgeTableChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'TorqueUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'BridgeConfiguration1', 'name': 'bridgeConfig', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'nominalBridgeResistance', 'type': 'float64' }, { 'direction': 'in', 'name': 'electricalVals', 'size': { 'mechanism': 'len', 'value': 'numElectricalVals' }, 'type': 'const float64[]' }, { 'direction': 'in', 'name': 'numElectricalVals', 'type': 'uInt32' }, { 'direction': 'in', 'enum': 'BridgeElectricalUnits', 'name': 'electricalUnits', 'type': 'int32' }, { 'direction': 'in', 'name': 'physicalVals', 'size': { 'mechanism': 'len', 'value': 'numPhysicalVals' }, 'type': 'const float64[]' }, { 'direction': 'in', 'name': 'numPhysicalVals', 'type': 'uInt32' }, { 'direction': 'in', 'enum': 'BridgePhysicalUnits', 'name': 'physicalUnits', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 
'int32' }, 'CreateAITorqueBridgeTwoPointLinChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'TorqueUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'BridgeConfiguration1', 'name': 'bridgeConfig', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'nominalBridgeResistance', 'type': 'float64' }, { 'direction': 'in', 'name': 'firstElectricalVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'secondElectricalVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'BridgeElectricalUnits', 'name': 'electricalUnits', 'type': 'int32' }, { 'direction': 'in', 'name': 'firstPhysicalVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'secondPhysicalVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'BridgePhysicalUnits', 'name': 'physicalUnits', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAIVelocityIEPEChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'InputTermCfgWithDefault', 'name': 'terminalConfig', 'type': 'int32' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'VelocityUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'name': 
'sensitivity', 'type': 'float64' }, { 'direction': 'in', 'enum': 'VelocityIEPESensorSensitivityUnits', 'name': 'sensitivityUnits', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'currentExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'currentExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAIVoltageChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'InputTermCfgWithDefault', 'name': 'terminalConfig', 'type': 'int32' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'VoltageUnits2', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAIVoltageChanWithExcit': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'InputTermCfgWithDefault', 'name': 'terminalConfig', 'type': 'int32' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'VoltageUnits2', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'BridgeConfiguration1', 'name': 'bridgeConfig', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'useExcitForScaling', 'type': 'bool32' }, { 'direction': 'in', 
'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAIVoltageRMSChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'InputTermCfgWithDefault', 'name': 'terminalConfig', 'type': 'int32' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'VoltageUnits2', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAOCurrentChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'CurrentUnits2', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateAOFuncGenChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'FuncGenType', 'name': 'type', 'type': 'int32' }, { 'direction': 'in', 'name': 'freq', 'type': 'float64' }, { 'direction': 'in', 'name': 'amplitude', 'type': 'float64' }, { 'direction': 'in', 'name': 'offset', 'type': 'float64' } ], 'returns': 'int32' }, 'CreateAOVoltageChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const 
char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'VoltageUnits2', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateCIAngEncoderChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'counter', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'EncoderType2', 'name': 'decodingType', 'type': 'int32' }, { 'direction': 'in', 'name': 'zidxEnable', 'type': 'bool32' }, { 'direction': 'in', 'name': 'zidxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'EncoderZIndexPhase1', 'name': 'zidxPhase', 'type': 'int32' }, { 'direction': 'in', 'enum': 'AngleUnits2', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'name': 'pulsesPerRev', 'type': 'uInt32' }, { 'direction': 'in', 'name': 'initialAngle', 'type': 'float64' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateCIAngVelocityChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'counter', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'EncoderType2', 'name': 'decodingType', 'type': 'int32' }, { 'direction': 'in', 'enum': 'AngularVelocityUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'name': 'pulsesPerRev', 'type': 'uInt32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateCICountEdgesChan': { 
'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'counter', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'Edge1', 'name': 'edge', 'type': 'int32' }, { 'direction': 'in', 'name': 'initialCount', 'type': 'uInt32' }, { 'direction': 'in', 'enum': 'CountDirection1', 'name': 'countDirection', 'type': 'int32' } ], 'returns': 'int32' }, 'CreateCIDutyCycleChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'counter', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minFreq', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxFreq', 'type': 'float64' }, { 'direction': 'in', 'enum': 'Edge1', 'name': 'edge', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateCIFreqChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'counter', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'FrequencyUnits3', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'Edge1', 'name': 'edge', 'type': 'int32' }, { 'direction': 'in', 'enum': 'CounterFrequencyMethod', 'name': 'measMethod', 'type': 'int32' }, { 'direction': 'in', 'name': 'measTime', 'type': 'float64' }, { 'direction': 'in', 'name': 'divisor', 'type': 'uInt32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateCIGPSTimestampChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 
'counter', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'TimeUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'GpsSignalType1', 'name': 'syncMethod', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateCILinEncoderChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'counter', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'EncoderType2', 'name': 'decodingType', 'type': 'int32' }, { 'direction': 'in', 'name': 'zidxEnable', 'type': 'bool32' }, { 'direction': 'in', 'name': 'zidxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'EncoderZIndexPhase1', 'name': 'zidxPhase', 'type': 'int32' }, { 'direction': 'in', 'enum': 'LengthUnits3', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'name': 'distPerPulse', 'type': 'float64' }, { 'direction': 'in', 'name': 'initialPos', 'type': 'float64' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateCILinVelocityChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'counter', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'EncoderType2', 'name': 'decodingType', 'type': 'int32' }, { 'direction': 'in', 'enum': 'VelocityUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'name': 'distPerPulse', 'type': 'float64' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateCIPeriodChan': { 'parameters': [ { 
'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'counter', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'TimeUnits3', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'Edge1', 'name': 'edge', 'type': 'int32' }, { 'direction': 'in', 'enum': 'CounterFrequencyMethod', 'name': 'measMethod', 'type': 'int32' }, { 'direction': 'in', 'name': 'measTime', 'type': 'float64' }, { 'direction': 'in', 'name': 'divisor', 'type': 'uInt32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateCIPulseChanFreq': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'counter', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'FrequencyUnits2', 'name': 'units', 'type': 'int32' } ], 'returns': 'int32' }, 'CreateCIPulseChanTicks': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'counter', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'sourceTerminal', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' } ], 'returns': 'int32' }, 'CreateCIPulseChanTime': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'counter', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 
'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'DigitalWidthUnits3', 'name': 'units', 'type': 'int32' } ], 'returns': 'int32' }, 'CreateCIPulseWidthChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'counter', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'TimeUnits3', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'Edge1', 'name': 'startingEdge', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateCISemiPeriodChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'counter', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'TimeUnits3', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateCITwoEdgeSepChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'counter', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'TimeUnits3', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'Edge1', 'name': 'firstEdge', 'type': 'int32' }, { 'direction': 'in', 'enum': 'Edge1', 'name': 'secondEdge', 
'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateCOPulseChanFreq': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'counter', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'FrequencyUnits2', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'Level1', 'name': 'idleState', 'type': 'int32' }, { 'direction': 'in', 'name': 'initialDelay', 'type': 'float64' }, { 'direction': 'in', 'name': 'freq', 'type': 'float64' }, { 'direction': 'in', 'name': 'dutyCycle', 'type': 'float64' } ], 'returns': 'int32' }, 'CreateCOPulseChanTicks': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'counter', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'sourceTerminal', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'Level1', 'name': 'idleState', 'type': 'int32' }, { 'direction': 'in', 'name': 'initialDelay', 'type': 'int32' }, { 'direction': 'in', 'name': 'lowTicks', 'type': 'int32' }, { 'direction': 'in', 'name': 'highTicks', 'type': 'int32' } ], 'returns': 'int32' }, 'CreateCOPulseChanTime': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'counter', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'DigitalWidthUnits3', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'Level1', 'name': 'idleState', 'type': 'int32' }, { 'direction': 'in', 'name': 'initialDelay', 'type': 'float64' }, { 'direction': 'in', 'name': 'lowTime', 'type': 'float64' }, { 'direction': 'in', 'name': 'highTime', 'type': 'float64' } ], 'returns': 'int32' }, 'CreateDIChan': { 
'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'lines', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToLines', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'LineGrouping', 'name': 'lineGrouping', 'type': 'int32' } ], 'returns': 'int32' }, 'CreateDOChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'lines', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToLines', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'LineGrouping', 'name': 'lineGrouping', 'type': 'int32' } ], 'returns': 'int32' }, 'CreateLinScale': { 'parameters': [ { 'direction': 'in', 'name': 'name', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'slope', 'type': 'float64' }, { 'direction': 'in', 'name': 'yIntercept', 'type': 'float64' }, { 'direction': 'in', 'enum': 'UnitsPreScaled', 'name': 'preScaledUnits', 'type': 'int32' }, { 'direction': 'in', 'name': 'scaledUnits', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateMapScale': { 'parameters': [ { 'direction': 'in', 'name': 'name', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'prescaledMin', 'type': 'float64' }, { 'direction': 'in', 'name': 'prescaledMax', 'type': 'float64' }, { 'direction': 'in', 'name': 'scaledMin', 'type': 'float64' }, { 'direction': 'in', 'name': 'scaledMax', 'type': 'float64' }, { 'direction': 'in', 'enum': 'UnitsPreScaled', 'name': 'preScaledUnits', 'type': 'int32' }, { 'direction': 'in', 'name': 'scaledUnits', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreatePolynomialScale': { 'parameters': [ { 'direction': 'in', 'name': 'name', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'forwardCoeffs', 'size': { 'mechanism': 'len', 'value': 'numForwardCoeffsIn' }, 'type': 'const float64[]' }, { 'direction': 'in', 'name': 'numForwardCoeffsIn', 'type': 'uInt32' }, { 'direction': 'in', 'name': 'reverseCoeffs', 'size': 
{ 'mechanism': 'len', 'value': 'numReverseCoeffsIn' }, 'type': 'const float64[]' }, { 'direction': 'in', 'name': 'numReverseCoeffsIn', 'type': 'uInt32' }, { 'direction': 'in', 'enum': 'UnitsPreScaled', 'name': 'preScaledUnits', 'type': 'int32' }, { 'direction': 'in', 'name': 'scaledUnits', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateTEDSAIAccelChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'InputTermCfgWithDefault', 'name': 'terminalConfig', 'type': 'int32' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'AccelUnits2', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'currentExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'currentExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateTEDSAIBridgeChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'TEDSUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateTEDSAICurrentChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 
'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'InputTermCfgWithDefault', 'name': 'terminalConfig', 'type': 'int32' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'TEDSUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'CurrentShuntResistorLocationWithDefault', 'name': 'shuntResistorLoc', 'type': 'int32' }, { 'direction': 'in', 'name': 'extShuntResistorVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateTEDSAIForceBridgeChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'ForceUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateTEDSAIForceIEPEChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'InputTermCfgWithDefault', 'name': 'terminalConfig', 'type': 'int32' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'ForceIEPEUnits', 
'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'currentExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'currentExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateTEDSAIMicrophoneChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'InputTermCfgWithDefault', 'name': 'terminalConfig', 'type': 'int32' }, { 'direction': 'in', 'enum': 'SoundPressureUnits1', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'name': 'maxSndPressLevel', 'type': 'float64' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'currentExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'currentExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateTEDSAIPosLVDTChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'LengthUnits2', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'voltageExcitFreq', 'type': 'float64' }, { 'direction': 'in', 'enum': 'ACExcitWireMode', 'name': 'acExcitWireMode', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 
'CreateTEDSAIPosRVDTChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'AngleUnits1', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'voltageExcitFreq', 'type': 'float64' }, { 'direction': 'in', 'enum': 'ACExcitWireMode', 'name': 'acExcitWireMode', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateTEDSAIPressureBridgeChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'PressureUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateTEDSAIRTDChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, 
{ 'direction': 'in', 'enum': 'TemperatureUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ResistanceConfiguration', 'name': 'resistanceConfig', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'currentExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'currentExcitVal', 'type': 'float64' } ], 'returns': 'int32' }, 'CreateTEDSAIResistanceChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'TEDSUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ResistanceConfiguration', 'name': 'resistanceConfig', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'currentExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'currentExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateTEDSAIStrainGageChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'StrainUnits1', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'initialBridgeVoltage', 'type': 'float64' }, { 'direction': 'in', 'name': 'leadWireResistance', 'type': 'float64' }, { 'direction': 
'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateTEDSAIThrmcplChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'TemperatureUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'CJCSource1', 'name': 'cjcSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'cjcVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'cjcChannel', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateTEDSAIThrmstrChanIex': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'TemperatureUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ResistanceConfiguration', 'name': 'resistanceConfig', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'currentExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'currentExcitVal', 'type': 'float64' } ], 'returns': 'int32' }, 'CreateTEDSAIThrmstrChanVex': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'TemperatureUnits', 'name': 'units', 
'type': 'int32' }, { 'direction': 'in', 'enum': 'ResistanceConfiguration', 'name': 'resistanceConfig', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'r1', 'type': 'float64' } ], 'returns': 'int32' }, 'CreateTEDSAITorqueBridgeChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'TorqueUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateTEDSAIVoltageChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'InputTermCfgWithDefault', 'name': 'terminalConfig', 'type': 'int32' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'TEDSUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateTEDSAIVoltageChanWithExcit': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'nameToAssignToChannel', 
'type': 'const char[]' }, { 'direction': 'in', 'enum': 'InputTermCfgWithDefault', 'name': 'terminalConfig', 'type': 'int32' }, { 'direction': 'in', 'name': 'minVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'maxVal', 'type': 'float64' }, { 'direction': 'in', 'enum': 'TEDSUnits', 'name': 'units', 'type': 'int32' }, { 'direction': 'in', 'enum': 'ExcitationSource', 'name': 'voltageExcitSource', 'type': 'int32' }, { 'direction': 'in', 'name': 'voltageExcitVal', 'type': 'float64' }, { 'direction': 'in', 'name': 'customScaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateTableScale': { 'parameters': [ { 'direction': 'in', 'name': 'name', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'prescaledVals', 'size': { 'mechanism': 'len', 'value': 'numPrescaledValsIn' }, 'type': 'const float64[]' }, { 'direction': 'in', 'name': 'numPrescaledValsIn', 'type': 'uInt32' }, { 'direction': 'in', 'name': 'scaledVals', 'size': { 'mechanism': 'len', 'value': 'numScaledValsIn' }, 'type': 'const float64[]' }, { 'direction': 'in', 'name': 'numScaledValsIn', 'type': 'uInt32' }, { 'direction': 'in', 'enum': 'UnitsPreScaled', 'name': 'preScaledUnits', 'type': 'int32' }, { 'direction': 'in', 'name': 'scaledUnits', 'type': 'const char[]' } ], 'returns': 'int32' }, 'CreateTask': { 'init_method': True, 'parameters': [ { 'direction': 'in', 'is_session_name': True, 'name': 'sessionName', 'type': 'const char[]' }, { 'direction': 'out', 'name': 'task', 'type': 'TaskHandle' } ], 'returns': 'int32' }, 'CreateWatchdogTimerTask': { 'init_method': True, 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'is_session_name': True, 'name': 'sessionName', 'type': 'const char[]' }, { 'direction': 'out', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'include_in_proto': False, 'name': 'lines', 'repeating_argument': True, 'type': 'const char[]' }, { 
'direction': 'in', 'enum': 'DigitalLineState', 'include_in_proto': False, 'name': 'expState', 'repeating_argument': True, 'type': 'int32' }, { 'direction': 'in', 'grpc_type': 'repeated WatchdogExpChannelsAndState', 'is_compound_type': True, 'max_length': 96, 'name': 'expStates', 'repeated_var_args': True } ], 'returns': 'int32' }, 'CreateWatchdogTimerTaskEx': { 'init_method': True, 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'is_session_name': True, 'name': 'sessionName', 'type': 'const char[]' }, { 'direction': 'out', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' } ], 'returns': 'int32' }, 'DeleteNetworkDevice': { 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'DeleteSavedGlobalChan': { 'parameters': [ { 'direction': 'in', 'name': 'channelName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'DeleteSavedScale': { 'parameters': [ { 'direction': 'in', 'name': 'scaleName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'DeleteSavedTask': { 'parameters': [ { 'direction': 'in', 'name': 'taskName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'DeviceSupportsCal': { 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'out', 'name': 'calSupported', 'type': 'bool32' } ], 'returns': 'int32' }, 'DisableRefTrig': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' } ], 'returns': 'int32' }, 'DisableStartTrig': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' } ], 'returns': 'int32' }, 'DisconnectTerms': { 'parameters': [ { 'direction': 'in', 'name': 'sourceTerminal', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'destinationTerminal', 'type': 'const char[]' } ], 'returns': 'int32' }, 'ExportSignal': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 
'direction': 'in', 'enum': 'Signal', 'name': 'signalID', 'type': 'int32' }, { 'direction': 'in', 'name': 'outputTerminal', 'type': 'const char[]' } ], 'returns': 'int32' }, 'GetAIChanCalCalDate': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'channelName', 'type': 'const char[]' }, { 'direction': 'out', 'name': 'year', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'month', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'day', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'hour', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'minute', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetAIChanCalExpDate': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'channelName', 'type': 'const char[]' }, { 'direction': 'out', 'name': 'year', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'month', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'day', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'hour', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'minute', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetAnalogPowerUpStates': { 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'include_in_proto': False, 'name': 'channelName', 'repeating_argument': True, 'type': 'const char[]' }, { 'direction': 'out', 'include_in_proto': False, 'name': 'state', 'repeating_argument': True, 'type': 'float64' }, { 'direction': 'in', 'enum': 'PowerUpChannelType', 'include_in_proto': False, 'name': 'channelType', 'repeating_argument': True, 'type': 'int32' }, { 'direction': 'in', 'grpc_type': 'repeated AnalogPowerUpChannelAndType', 'is_compound_type': True, 'max_length': 96, 'name': 'channels', 'repeated_var_args': True }, { 'direction': 'out', 'grpc_type': 'repeated double', 'max_length': 96, 'name': 'powerUpStates', 'repeated_var_args': True } ], 'returns': 'int32' }, 'GetArmStartTrigTimestampVal': { 
'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'out', 'name': 'data', 'type': 'CVIAbsoluteTime' } ], 'returns': 'int32' }, 'GetArmStartTrigTrigWhen': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'out', 'name': 'data', 'type': 'CVIAbsoluteTime' } ], 'returns': 'int32' }, 'GetAutoConfiguredCDAQSyncConnections': { 'parameters': [ { 'direction': 'out', 'name': 'portList', 'size': { 'mechanism': 'ivi-dance', 'value': 'portListSize' }, 'type': 'char[]' }, { 'direction': 'in', 'name': 'portListSize', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetBufferAttributeUInt32': { 'cname': 'DAQmxGetBufferAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'BufferAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetCalInfoAttributeBool': { 'cname': 'DAQmxGetCalInfoAttribute', 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'CalibrationInfoAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetCalInfoAttributeDouble': { 'cname': 'DAQmxGetCalInfoAttribute', 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'CalibrationInfoAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetCalInfoAttributeString': { 'cname': 'DAQmxGetCalInfoAttribute', 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const 
char[]' }, { 'direction': 'in', 'grpc_type': 'CalibrationInfoAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'char[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetCalInfoAttributeUInt32': { 'cname': 'DAQmxGetCalInfoAttribute', 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'CalibrationInfoAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetChanAttributeBool': { 'cname': 'DAQmxGetChanAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'channel', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'ChannelAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetChanAttributeDouble': { 'cname': 'DAQmxGetChanAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'channel', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'ChannelAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetChanAttributeDoubleArray': { 'cname': 'DAQmxGetChanAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'channel', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 
'ChannelAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'float64[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetChanAttributeInt32': { 'cname': 'DAQmxGetChanAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'channel', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'ChannelAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetChanAttributeString': { 'cname': 'DAQmxGetChanAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'channel', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'ChannelAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'char[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetChanAttributeUInt32': { 'cname': 'DAQmxGetChanAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'channel', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'ChannelAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetDeviceAttributeBool': { 'cname': 'DAQmxGetDeviceAttribute', 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'DeviceAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 
'out', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetDeviceAttributeDouble': { 'cname': 'DAQmxGetDeviceAttribute', 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'DeviceAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetDeviceAttributeDoubleArray': { 'cname': 'DAQmxGetDeviceAttribute', 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'DeviceAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'float64[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetDeviceAttributeInt32': { 'cname': 'DAQmxGetDeviceAttribute', 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'DeviceAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetDeviceAttributeInt32Array': { 'cname': 'DAQmxGetDeviceAttribute', 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'DeviceAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'int32[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetDeviceAttributeString': { 'cname': 
'DAQmxGetDeviceAttribute', 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'DeviceAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'char[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetDeviceAttributeUInt32': { 'cname': 'DAQmxGetDeviceAttribute', 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'DeviceAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetDeviceAttributeUInt32Array': { 'cname': 'DAQmxGetDeviceAttribute', 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'DeviceAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'uInt32[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetDigitalLogicFamilyPowerUpState': { 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'out', 'name': 'logicFamily', 'type': 'int32' } ], 'returns': 'int32' }, 'GetDigitalPowerUpStates': { 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'include_in_proto': False, 'name': 'channelName', 'repeating_argument': True, 'type': 'const char[]' }, { 'direction': 'out', 'enum': 'PowerUpStates', 'include_in_proto': False, 'name': 'state', 'repeating_argument': True, 'type': 'int32' }, { 'direction': 'in', 'grpc_type': 'repeated string', 'max_length': 96, 'name': 'channelName', 'repeated_var_args': True }, { 
'direction': 'out', 'grpc_type': 'repeated PowerUpStates', 'max_length': 96, 'name': 'powerUpStates', 'repeated_var_args': True } ], 'returns': 'int32' }, 'GetDigitalPullUpPullDownStates': { 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'include_in_proto': False, 'name': 'channelName', 'repeating_argument': True, 'type': 'const char[]' }, { 'direction': 'out', 'enum': 'ResistorState', 'include_in_proto': False, 'name': 'state', 'repeating_argument': True, 'type': 'int32' }, { 'direction': 'in', 'grpc_type': 'repeated string', 'max_length': 96, 'name': 'channelName', 'repeated_var_args': True }, { 'direction': 'out', 'grpc_type': 'repeated ResistorState', 'max_length': 96, 'name': 'pullUpPullDownStates', 'repeated_var_args': True } ], 'returns': 'int32' }, 'GetDisconnectedCDAQSyncPorts': { 'parameters': [ { 'direction': 'out', 'name': 'portList', 'size': { 'mechanism': 'ivi-dance', 'value': 'portListSize' }, 'type': 'char[]' }, { 'direction': 'in', 'name': 'portListSize', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetErrorString': { 'parameters': [ { 'direction': 'in', 'name': 'errorCode', 'type': 'int32' }, { 'direction': 'out', 'name': 'errorString', 'size': { 'mechanism': 'ivi-dance', 'value': 'bufferSize' }, 'type': 'char[]' }, { 'direction': 'in', 'name': 'bufferSize', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetExportedSignalAttributeBool': { 'cname': 'DAQmxGetExportedSignalAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'ExportSignalAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetExportedSignalAttributeDouble': { 'cname': 'DAQmxGetExportedSignalAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 
'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'ExportSignalAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetExportedSignalAttributeInt32': { 'cname': 'DAQmxGetExportedSignalAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'ExportSignalAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetExportedSignalAttributeString': { 'cname': 'DAQmxGetExportedSignalAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'ExportSignalAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'char[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetExportedSignalAttributeUInt32': { 'cname': 'DAQmxGetExportedSignalAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'ExportSignalAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetExtendedErrorInfo': { 'parameters': [ { 'direction': 'out', 'name': 'errorString', 'size': { 'mechanism': 'ivi-dance', 'value': 'bufferSize' }, 'type': 'char[]' }, { 'direction': 'in', 'name': 'bufferSize', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetFirstSampClkWhen': { 'parameters': [ { 'direction': 'in', 'name': 'task', 
'type': 'TaskHandle' }, { 'direction': 'out', 'name': 'data', 'type': 'CVIAbsoluteTime' } ], 'returns': 'int32' }, 'GetFirstSampTimestampVal': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'out', 'name': 'data', 'type': 'CVIAbsoluteTime' } ], 'returns': 'int32' }, 'GetNthTaskChannel': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'index', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'buffer', 'size': { 'mechanism': 'ivi-dance', 'value': 'bufferSize' }, 'type': 'char[]' }, { 'direction': 'in', 'name': 'bufferSize', 'type': 'int32' } ], 'returns': 'int32' }, 'GetNthTaskDevice': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'index', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'buffer', 'size': { 'mechanism': 'ivi-dance', 'value': 'bufferSize' }, 'type': 'char[]' }, { 'direction': 'in', 'name': 'bufferSize', 'type': 'int32' } ], 'returns': 'int32' }, 'GetNthTaskReadChannel': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'index', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'buffer', 'size': { 'mechanism': 'ivi-dance', 'value': 'bufferSize' }, 'type': 'char[]' }, { 'direction': 'in', 'name': 'bufferSize', 'type': 'int32' } ], 'returns': 'int32' }, 'GetPersistedChanAttributeBool': { 'cname': 'DAQmxGetPersistedChanAttribute', 'parameters': [ { 'direction': 'in', 'name': 'channel', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'PersistedChannelAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetPersistedChanAttributeString': { 'cname': 'DAQmxGetPersistedChanAttribute', 'parameters': [ { 'direction': 'in', 'name': 
'channel', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'PersistedChannelAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'char[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetPersistedScaleAttributeBool': { 'cname': 'DAQmxGetPersistedScaleAttribute', 'parameters': [ { 'direction': 'in', 'name': 'scaleName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'PersistedScaleAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetPersistedScaleAttributeString': { 'cname': 'DAQmxGetPersistedScaleAttribute', 'parameters': [ { 'direction': 'in', 'name': 'scaleName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'PersistedScaleAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'char[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetPersistedTaskAttributeBool': { 'cname': 'DAQmxGetPersistedTaskAttribute', 'parameters': [ { 'direction': 'in', 'name': 'taskName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'PersistedTaskAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetPersistedTaskAttributeString': { 'cname': 'DAQmxGetPersistedTaskAttribute', 'parameters': [ { 'direction': 'in', 'name': 'taskName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'PersistedTaskAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 
'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'char[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetPhysicalChanAttributeBool': { 'cname': 'DAQmxGetPhysicalChanAttribute', 'parameters': [ { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'PhysicalChannelAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetPhysicalChanAttributeBytes': { 'cname': 'DAQmxGetPhysicalChanAttribute', 'parameters': [ { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'PhysicalChannelAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'uInt8[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetPhysicalChanAttributeDouble': { 'cname': 'DAQmxGetPhysicalChanAttribute', 'parameters': [ { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'PhysicalChannelAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetPhysicalChanAttributeDoubleArray': { 'cname': 'DAQmxGetPhysicalChanAttribute', 'parameters': [ { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'PhysicalChannelAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'float64[]' }, { 'direction': 'in', 'name': 'size', 
'type': 'uInt32' } ], 'returns': 'int32' }, 'GetPhysicalChanAttributeInt32': { 'cname': 'DAQmxGetPhysicalChanAttribute', 'parameters': [ { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'PhysicalChannelAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetPhysicalChanAttributeInt32Array': { 'cname': 'DAQmxGetPhysicalChanAttribute', 'parameters': [ { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'PhysicalChannelAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'int32[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetPhysicalChanAttributeString': { 'cname': 'DAQmxGetPhysicalChanAttribute', 'parameters': [ { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'PhysicalChannelAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'char[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetPhysicalChanAttributeUInt32': { 'cname': 'DAQmxGetPhysicalChanAttribute', 'parameters': [ { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'PhysicalChannelAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetPhysicalChanAttributeUInt32Array': { 'cname': 'DAQmxGetPhysicalChanAttribute', 
'parameters': [ { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'PhysicalChannelAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'uInt32[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetReadAttributeBool': { 'cname': 'DAQmxGetReadAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'ReadAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetReadAttributeDouble': { 'cname': 'DAQmxGetReadAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'ReadAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetReadAttributeInt32': { 'cname': 'DAQmxGetReadAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'ReadAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetReadAttributeString': { 'cname': 'DAQmxGetReadAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'ReadAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'char[]' 
}, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetReadAttributeUInt32': { 'cname': 'DAQmxGetReadAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'ReadAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetReadAttributeUInt64': { 'cname': 'DAQmxGetReadAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'ReadAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'uInt64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetRealTimeAttributeBool': { 'cname': 'DAQmxGetRealTimeAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'RealTimeAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetRealTimeAttributeInt32': { 'cname': 'DAQmxGetRealTimeAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'RealTimeAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetRealTimeAttributeUInt32': { 'cname': 'DAQmxGetRealTimeAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'RealTimeAttributes', 
'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetRefTrigTimestampVal': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'out', 'name': 'data', 'type': 'CVIAbsoluteTime' } ], 'returns': 'int32' }, 'GetScaleAttributeDouble': { 'cname': 'DAQmxGetScaleAttribute', 'parameters': [ { 'direction': 'in', 'name': 'scaleName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'ScaleAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetScaleAttributeDoubleArray': { 'cname': 'DAQmxGetScaleAttribute', 'parameters': [ { 'direction': 'in', 'name': 'scaleName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'ScaleAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'float64[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetScaleAttributeInt32': { 'cname': 'DAQmxGetScaleAttribute', 'parameters': [ { 'direction': 'in', 'name': 'scaleName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'ScaleAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetScaleAttributeString': { 'cname': 'DAQmxGetScaleAttribute', 'parameters': [ { 'direction': 'in', 'name': 'scaleName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'ScaleAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 
'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'char[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetSelfCalLastDateAndTime': { 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'out', 'name': 'year', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'month', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'day', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'hour', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'minute', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetStartTrigTimestampVal': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'out', 'name': 'data', 'type': 'CVIAbsoluteTime' } ], 'returns': 'int32' }, 'GetStartTrigTrigWhen': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'out', 'name': 'data', 'type': 'CVIAbsoluteTime' } ], 'returns': 'int32' }, 'GetSyncPulseTimeWhen': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'out', 'name': 'data', 'type': 'CVIAbsoluteTime' } ], 'returns': 'int32' }, 'GetSystemInfoAttributeString': { 'cname': 'DAQmxGetSystemInfoAttribute', 'parameters': [ { 'direction': 'in', 'grpc_type': 'SystemAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'char[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetSystemInfoAttributeUInt32': { 'cname': 'DAQmxGetSystemInfoAttribute', 'parameters': [ { 'direction': 'in', 'grpc_type': 'SystemAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTaskAttributeBool': { 'cname': 
'DAQmxGetTaskAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TaskAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTaskAttributeString': { 'cname': 'DAQmxGetTaskAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TaskAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'char[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTaskAttributeUInt32': { 'cname': 'DAQmxGetTaskAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TaskAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTimingAttributeBool': { 'cname': 'DAQmxGetTimingAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTimingAttributeDouble': { 'cname': 'DAQmxGetTimingAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': 
'0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTimingAttributeExBool': { 'cname': 'DAQmxGetTimingAttributeEx', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'deviceNames', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTimingAttributeExDouble': { 'cname': 'DAQmxGetTimingAttributeEx', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'deviceNames', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTimingAttributeExInt32': { 'cname': 'DAQmxGetTimingAttributeEx', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'deviceNames', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTimingAttributeExString': { 'cname': 'DAQmxGetTimingAttributeEx', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'deviceNames', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 
'size' }, 'type': 'char[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTimingAttributeExTimestamp': { 'cname': 'DAQmxGetTimingAttributeEx', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'deviceNames', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'CVIAbsoluteTime' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTimingAttributeExUInt32': { 'cname': 'DAQmxGetTimingAttributeEx', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'deviceNames', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTimingAttributeExUInt64': { 'cname': 'DAQmxGetTimingAttributeEx', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'deviceNames', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'uInt64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTimingAttributeInt32': { 'cname': 'DAQmxGetTimingAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 
'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTimingAttributeString': { 'cname': 'DAQmxGetTimingAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'char[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTimingAttributeTimestamp': { 'cname': 'DAQmxGetTimingAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'CVIAbsoluteTime' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTimingAttributeUInt32': { 'cname': 'DAQmxGetTimingAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTimingAttributeUInt64': { 'cname': 'DAQmxGetTimingAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'uInt64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTrigAttributeBool': { 'cname': 'DAQmxGetTrigAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TriggerAttributes', 'name': 
'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTrigAttributeDouble': { 'cname': 'DAQmxGetTrigAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TriggerAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTrigAttributeDoubleArray': { 'cname': 'DAQmxGetTrigAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TriggerAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'float64[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTrigAttributeInt32': { 'cname': 'DAQmxGetTrigAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TriggerAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTrigAttributeInt32Array': { 'cname': 'DAQmxGetTrigAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TriggerAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'int32[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTrigAttributeString': { 'cname': 
'DAQmxGetTrigAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TriggerAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'char[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTrigAttributeTimestamp': { 'cname': 'DAQmxGetTrigAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TriggerAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'CVIAbsoluteTime' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetTrigAttributeUInt32': { 'cname': 'DAQmxGetTrigAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TriggerAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetWatchdogAttributeBool': { 'cname': 'DAQmxGetWatchdogAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'lines', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'WatchdogAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetWatchdogAttributeDouble': { 'cname': 'DAQmxGetWatchdogAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'lines', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 
'WatchdogAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetWatchdogAttributeInt32': { 'cname': 'DAQmxGetWatchdogAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'lines', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'WatchdogAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetWatchdogAttributeString': { 'cname': 'DAQmxGetWatchdogAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'lines', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'WatchdogAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'char[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetWriteAttributeBool': { 'cname': 'DAQmxGetWriteAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'WriteAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetWriteAttributeDouble': { 'cname': 'DAQmxGetWriteAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'WriteAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 
'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetWriteAttributeInt32': { 'cname': 'DAQmxGetWriteAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'WriteAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetWriteAttributeString': { 'cname': 'DAQmxGetWriteAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'WriteAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'size': { 'mechanism': 'ivi-dance', 'value': 'size' }, 'type': 'char[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetWriteAttributeUInt32': { 'cname': 'DAQmxGetWriteAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'WriteAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'GetWriteAttributeUInt64': { 'cname': 'DAQmxGetWriteAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'WriteAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'out', 'name': 'value', 'type': 'uInt64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'IsTaskDone': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'out', 'name': 'isTaskDone', 'type': 'bool32' } ], 'returns': 'int32' }, 
'LoadTask': { 'init_method': True, 'parameters': [ { 'direction': 'in', 'is_session_name': True, 'name': 'sessionName', 'type': 'const char[]' }, { 'direction': 'out', 'name': 'task', 'type': 'TaskHandle' } ], 'returns': 'int32' }, 'ReadAnalogF64': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'fillMode', 'type': 'int32' }, { 'direction': 'out', 'name': 'readArray', 'size': { 'mechanism': 'passed-in', 'value': 'arraySizeInSamps' }, 'type': 'float64[]' }, { 'direction': 'in', 'name': 'arraySizeInSamps', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'sampsPerChanRead', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'ReadAnalogScalarF64': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'out', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'ReadBinaryI16': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'fillMode', 'type': 'int32' }, { 'coerced': True, 'direction': 'out', 'name': 'readArray', 'size': { 'mechanism': 'passed-in', 'value': 'arraySizeInSamps' }, 'type': 'int16[]' }, { 'direction': 'in', 'name': 'arraySizeInSamps', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'sampsPerChanRead', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 
'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'ReadBinaryI32': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'fillMode', 'type': 'int32' }, { 'direction': 'out', 'name': 'readArray', 'size': { 'mechanism': 'passed-in', 'value': 'arraySizeInSamps' }, 'type': 'int32[]' }, { 'direction': 'in', 'name': 'arraySizeInSamps', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'sampsPerChanRead', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'ReadBinaryU16': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'fillMode', 'type': 'int32' }, { 'coerced': True, 'direction': 'out', 'name': 'readArray', 'size': { 'mechanism': 'passed-in', 'value': 'arraySizeInSamps' }, 'type': 'uInt16[]' }, { 'direction': 'in', 'name': 'arraySizeInSamps', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'sampsPerChanRead', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'ReadBinaryU32': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'fillMode', 'type': 'int32' }, { 'direction': 'out', 'name': 'readArray', 'size': { 'mechanism': 'passed-in', 'value': 'arraySizeInSamps' 
}, 'type': 'uInt32[]' }, { 'direction': 'in', 'name': 'arraySizeInSamps', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'sampsPerChanRead', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'ReadCounterF64': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'out', 'name': 'readArray', 'size': { 'mechanism': 'passed-in', 'value': 'arraySizeInSamps' }, 'type': 'float64[]' }, { 'direction': 'in', 'name': 'arraySizeInSamps', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'sampsPerChanRead', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'ReadCounterF64Ex': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'fillMode', 'type': 'int32' }, { 'direction': 'out', 'name': 'readArray', 'size': { 'mechanism': 'passed-in', 'value': 'arraySizeInSamps' }, 'type': 'float64[]' }, { 'direction': 'in', 'name': 'arraySizeInSamps', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'sampsPerChanRead', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'ReadCounterScalarF64': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'out', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 
'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'ReadCounterScalarU32': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'out', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'ReadCounterU32': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'out', 'name': 'readArray', 'size': { 'mechanism': 'passed-in', 'value': 'arraySizeInSamps' }, 'type': 'uInt32[]' }, { 'direction': 'in', 'name': 'arraySizeInSamps', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'sampsPerChanRead', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'ReadCounterU32Ex': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'fillMode', 'type': 'int32' }, { 'direction': 'out', 'name': 'readArray', 'size': { 'mechanism': 'passed-in', 'value': 'arraySizeInSamps' }, 'type': 'uInt32[]' }, { 'direction': 'in', 'name': 'arraySizeInSamps', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'sampsPerChanRead', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'ReadCtrFreq': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 
'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'interleaved', 'type': 'int32' }, { 'direction': 'out', 'name': 'readArrayFrequency', 'size': { 'mechanism': 'passed-in', 'value': 'arraySizeInSamps' }, 'type': 'float64[]' }, { 'direction': 'out', 'name': 'readArrayDutyCycle', 'size': { 'mechanism': 'passed-in', 'value': 'arraySizeInSamps' }, 'type': 'float64[]' }, { 'direction': 'in', 'name': 'arraySizeInSamps', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'sampsPerChanRead', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'ReadCtrFreqScalar': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'out', 'name': 'frequency', 'type': 'float64' }, { 'direction': 'out', 'name': 'dutyCycle', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'ReadCtrTicks': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'interleaved', 'type': 'int32' }, { 'direction': 'out', 'name': 'readArrayHighTicks', 'size': { 'mechanism': 'passed-in', 'value': 'arraySizeInSamps' }, 'type': 'uInt32[]' }, { 'direction': 'out', 'name': 'readArrayLowTicks', 'size': { 'mechanism': 'passed-in', 'value': 'arraySizeInSamps' }, 'type': 'uInt32[]' }, { 'direction': 'in', 'name': 'arraySizeInSamps', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'sampsPerChanRead', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 
'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'ReadCtrTicksScalar': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'out', 'name': 'highTicks', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'lowTicks', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'ReadCtrTime': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'interleaved', 'type': 'int32' }, { 'direction': 'out', 'name': 'readArrayHighTime', 'size': { 'mechanism': 'passed-in', 'value': 'arraySizeInSamps' }, 'type': 'float64[]' }, { 'direction': 'out', 'name': 'readArrayLowTime', 'size': { 'mechanism': 'passed-in', 'value': 'arraySizeInSamps' }, 'type': 'float64[]' }, { 'direction': 'in', 'name': 'arraySizeInSamps', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'sampsPerChanRead', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'ReadCtrTimeScalar': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'out', 'name': 'highTime', 'type': 'float64' }, { 'direction': 'out', 'name': 'lowTime', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'ReadDigitalLines': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 
'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'fillMode', 'type': 'int32' }, { 'direction': 'out', 'name': 'readArray', 'size': { 'mechanism': 'passed-in', 'value': 'arraySizeInBytes' }, 'type': 'uInt8[]' }, { 'direction': 'in', 'name': 'arraySizeInBytes', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'sampsPerChanRead', 'type': 'int32' }, { 'direction': 'out', 'name': 'numBytesPerSamp', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'ReadDigitalScalarU32': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'out', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'ReadDigitalU16': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'fillMode', 'type': 'int32' }, { 'coerced': True, 'direction': 'out', 'name': 'readArray', 'size': { 'mechanism': 'passed-in', 'value': 'arraySizeInSamps' }, 'type': 'uInt16[]' }, { 'direction': 'in', 'name': 'arraySizeInSamps', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'sampsPerChanRead', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'ReadDigitalU32': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 
'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'fillMode', 'type': 'int32' }, { 'direction': 'out', 'name': 'readArray', 'size': { 'mechanism': 'passed-in', 'value': 'arraySizeInSamps' }, 'type': 'uInt32[]' }, { 'direction': 'in', 'name': 'arraySizeInSamps', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'sampsPerChanRead', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'ReadDigitalU8': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'fillMode', 'type': 'int32' }, { 'direction': 'out', 'name': 'readArray', 'size': { 'mechanism': 'passed-in', 'value': 'arraySizeInSamps' }, 'type': 'uInt8[]' }, { 'direction': 'in', 'name': 'arraySizeInSamps', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'sampsPerChanRead', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'ReadRaw': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'out', 'name': 'readArray', 'size': { 'mechanism': 'passed-in', 'value': 'arraySizeInBytes' }, 'type': 'uInt8[]' }, { 'direction': 'in', 'name': 'arraySizeInBytes', 'type': 'uInt32' }, { 'direction': 'out', 'name': 'sampsRead', 'type': 'int32' }, { 'direction': 'out', 'name': 'numBytesPerSamp', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 
'RegisterDoneEvent': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'options', 'type': 'uInt32' }, { 'callback_params': [ { 'direction': 'out', 'include_in_proto': False, 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'out', 'name': 'status', 'type': 'int32' } ], 'direction': 'in', 'include_in_proto': False, 'name': 'callbackFunction', 'type': 'DAQmxDoneEventCallbackPtr' }, { 'callback_token': True, 'direction': 'in', 'include_in_proto': False, 'name': 'callbackData', 'pointer': True, 'type': 'void' } ], 'returns': 'int32', 'stream_response': True }, 'RegisterEveryNSamplesEvent': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'enum': 'EveryNSamplesEventType', 'name': 'everyNSamplesEventType', 'type': 'int32' }, { 'direction': 'in', 'name': 'nSamples', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'options', 'type': 'uInt32' }, { 'callback_params': [ { 'direction': 'out', 'include_in_proto': False, 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'out', 'enum': 'EveryNSamplesEventType', 'name': 'everyNSamplesEventType', 'type': 'int32' }, { 'direction': 'out', 'name': 'nSamples', 'type': 'uInt32' } ], 'direction': 'in', 'include_in_proto': False, 'name': 'callbackFunction', 'type': 'DAQmxEveryNSamplesEventCallbackPtr' }, { 'callback_token': True, 'direction': 'in', 'include_in_proto': False, 'name': 'callbackData', 'pointer': True, 'type': 'void' } ], 'returns': 'int32', 'stream_response': True }, 'RegisterSignalEvent': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'enum': 'Signal2', 'name': 'signalID', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'options', 'type': 'uInt32' }, { 'callback_params': [ { 'direction': 'out', 
'include_in_proto': False, 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'out', 'name': 'signalID', 'type': 'int32' } ], 'direction': 'in', 'include_in_proto': False, 'name': 'callbackFunction', 'type': 'DAQmxSignalEventCallbackPtr' }, { 'callback_token': True, 'direction': 'in', 'include_in_proto': False, 'name': 'callbackData', 'pointer': True, 'type': 'void' } ], 'returns': 'int32', 'stream_response': True }, 'RemoveCDAQSyncConnection': { 'parameters': [ { 'direction': 'in', 'name': 'portList', 'type': 'const char[]' } ], 'returns': 'int32' }, 'ReserveNetworkDevice': { 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'overrideReservation', 'type': 'bool32' } ], 'returns': 'int32' }, 'ResetBufferAttribute': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'BufferAttributes', 'name': 'attribute', 'type': 'int32' } ], 'returns': 'int32' }, 'ResetChanAttribute': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'channel', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'ChannelAttributes', 'name': 'attribute', 'type': 'int32' } ], 'returns': 'int32' }, 'ResetDevice': { 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'ResetExportedSignalAttribute': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'ExportSignalAttributes', 'name': 'attribute', 'type': 'int32' } ], 'returns': 'int32' }, 'ResetReadAttribute': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'ReadAttributes', 'name': 'attribute', 'type': 'int32' } ], 'returns': 'int32' }, 'ResetRealTimeAttribute': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 
'RealTimeAttributes', 'name': 'attribute', 'type': 'int32' } ], 'returns': 'int32' }, 'ResetTimingAttribute': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' } ], 'returns': 'int32' }, 'ResetTimingAttributeEx': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'deviceNames', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' } ], 'returns': 'int32' }, 'ResetTrigAttribute': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TriggerAttributes', 'name': 'attribute', 'type': 'int32' } ], 'returns': 'int32' }, 'ResetWatchdogAttribute': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'lines', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'WatchdogAttributes', 'name': 'attribute', 'type': 'int32' } ], 'returns': 'int32' }, 'ResetWriteAttribute': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'WriteAttributes', 'name': 'attribute', 'type': 'int32' } ], 'returns': 'int32' }, 'SaveGlobalChan': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'channelName', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'saveAs', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'author', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'SaveOptions', 'name': 'options', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SaveScale': { 'parameters': [ { 'direction': 'in', 'name': 'scaleName', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'saveAs', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'author', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 
'SaveOptions', 'name': 'options', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SaveTask': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'saveAs', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'author', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'SaveOptions', 'name': 'options', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SelfCal': { 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'SelfTestDevice': { 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'SetAIChanCalCalDate': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'channelName', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'year', 'type': 'uInt32' }, { 'direction': 'in', 'name': 'month', 'type': 'uInt32' }, { 'direction': 'in', 'name': 'day', 'type': 'uInt32' }, { 'direction': 'in', 'name': 'hour', 'type': 'uInt32' }, { 'direction': 'in', 'name': 'minute', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetAIChanCalExpDate': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'channelName', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'year', 'type': 'uInt32' }, { 'direction': 'in', 'name': 'month', 'type': 'uInt32' }, { 'direction': 'in', 'name': 'day', 'type': 'uInt32' }, { 'direction': 'in', 'name': 'hour', 'type': 'uInt32' }, { 'direction': 'in', 'name': 'minute', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetAnalogPowerUpStates': { 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'include_in_proto': False, 'name': 'channelNames', 'repeating_argument': True, 'type': 'const char[]' }, { 'direction': 'in', 'include_in_proto': False, 'name': 'state', 'repeating_argument': True, 'type': 'float64' }, { 'direction': 'in', 
'enum': 'PowerUpChannelType', 'include_in_proto': False, 'name': 'channelType', 'repeating_argument': True, 'type': 'int32' }, { 'direction': 'in', 'grpc_type': 'repeated AnalogPowerUpChannelsAndState', 'is_compound_type': True, 'max_length': 96, 'name': 'powerUpStates', 'repeated_var_args': True } ], 'returns': 'int32' }, 'SetArmStartTrigTrigWhen': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'data', 'type': 'CVIAbsoluteTime' } ], 'returns': 'int32' }, 'SetBufferAttributeUInt32': { 'cname': 'DAQmxSetBufferAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'BufferAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetCalInfoAttributeBool': { 'cname': 'DAQmxSetCalInfoAttribute', 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'CalibrationInfoAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetCalInfoAttributeDouble': { 'cname': 'DAQmxSetCalInfoAttribute', 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'CalibrationInfoAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetCalInfoAttributeString': { 'cname': 'DAQmxSetCalInfoAttribute', 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'CalibrationInfoAttributes', 'name': 'attribute', 'type': 'int32' }, { 
'direction': 'in', 'name': 'value', 'type': 'const char[]' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetCalInfoAttributeUInt32': { 'cname': 'DAQmxSetCalInfoAttribute', 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'CalibrationInfoAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetChanAttributeBool': { 'cname': 'DAQmxSetChanAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'channel', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'ChannelAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetChanAttributeDouble': { 'cname': 'DAQmxSetChanAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'channel', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'ChannelAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetChanAttributeDoubleArray': { 'cname': 'DAQmxSetChanAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'channel', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'ChannelAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'size': { 'mechanism': 'len', 
'value': 'size' }, 'type': 'const float64[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetChanAttributeInt32': { 'cname': 'DAQmxSetChanAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'channel', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'ChannelAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetChanAttributeString': { 'cname': 'DAQmxSetChanAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'channel', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'ChannelAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'const char[]' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetChanAttributeUInt32': { 'cname': 'DAQmxSetChanAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'channel', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'ChannelAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetDigitalLogicFamilyPowerUpState': { 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'LogicFamily', 'name': 'logicFamily', 'type': 'int32' } ], 'returns': 'int32' }, 'SetDigitalPowerUpStates': { 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'include_in_proto': 
False, 'name': 'channelNames', 'repeating_argument': True, 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'PowerUpStates', 'include_in_proto': False, 'name': 'state', 'repeating_argument': True, 'type': 'int32' }, { 'direction': 'in', 'grpc_type': 'repeated DigitalPowerUpChannelsAndState', 'is_compound_type': True, 'max_length': 96, 'name': 'powerUpStates', 'repeated_var_args': True } ], 'returns': 'int32' }, 'SetDigitalPullUpPullDownStates': { 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' }, { 'direction': 'in', 'include_in_proto': False, 'name': 'channelNames', 'repeating_argument': True, 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'ResistorState', 'include_in_proto': False, 'name': 'state', 'repeating_argument': True, 'type': 'int32' }, { 'direction': 'in', 'grpc_type': 'repeated DigitalPullUpPullDownChannelsAndState', 'is_compound_type': True, 'max_length': 96, 'name': 'pullUpPullDownStates', 'repeated_var_args': True } ], 'returns': 'int32' }, 'SetExportedSignalAttributeBool': { 'cname': 'DAQmxSetExportedSignalAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'ExportSignalAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetExportedSignalAttributeDouble': { 'cname': 'DAQmxSetExportedSignalAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'ExportSignalAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetExportedSignalAttributeInt32': { 'cname': 'DAQmxSetExportedSignalAttribute', 
'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'ExportSignalAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetExportedSignalAttributeString': { 'cname': 'DAQmxSetExportedSignalAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'ExportSignalAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'const char[]' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetExportedSignalAttributeUInt32': { 'cname': 'DAQmxSetExportedSignalAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'ExportSignalAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetFirstSampClkWhen': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'data', 'type': 'CVIAbsoluteTime' } ], 'returns': 'int32' }, 'SetReadAttributeBool': { 'cname': 'DAQmxSetReadAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'ReadAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetReadAttributeDouble': { 'cname': 'DAQmxSetReadAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 
'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'ReadAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetReadAttributeInt32': { 'cname': 'DAQmxSetReadAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'ReadAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetReadAttributeString': { 'cname': 'DAQmxSetReadAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'ReadAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'const char[]' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetReadAttributeUInt32': { 'cname': 'DAQmxSetReadAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'ReadAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetReadAttributeUInt64': { 'cname': 'DAQmxSetReadAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'ReadAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'uInt64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 
'SetRealTimeAttributeBool': { 'cname': 'DAQmxSetRealTimeAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'RealTimeAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetRealTimeAttributeInt32': { 'cname': 'DAQmxSetRealTimeAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'RealTimeAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetRealTimeAttributeUInt32': { 'cname': 'DAQmxSetRealTimeAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'RealTimeAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetScaleAttributeDouble': { 'cname': 'DAQmxSetScaleAttribute', 'parameters': [ { 'direction': 'in', 'name': 'scaleName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'ScaleAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetScaleAttributeDoubleArray': { 'cname': 'DAQmxSetScaleAttribute', 'parameters': [ { 'direction': 'in', 'name': 'scaleName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'ScaleAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 
'name': 'value', 'size': { 'mechanism': 'len', 'value': 'size' }, 'type': 'const float64[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetScaleAttributeInt32': { 'cname': 'DAQmxSetScaleAttribute', 'parameters': [ { 'direction': 'in', 'name': 'scaleName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'ScaleAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetScaleAttributeString': { 'cname': 'DAQmxSetScaleAttribute', 'parameters': [ { 'direction': 'in', 'name': 'scaleName', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'ScaleAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'const char[]' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetStartTrigTrigWhen': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'data', 'type': 'CVIAbsoluteTime' } ], 'returns': 'int32' }, 'SetSyncPulseTimeWhen': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'data', 'type': 'CVIAbsoluteTime' } ], 'returns': 'int32' }, 'SetTimingAttributeBool': { 'cname': 'DAQmxSetTimingAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetTimingAttributeDouble': { 'cname': 'DAQmxSetTimingAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, 
{ 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetTimingAttributeExBool': { 'cname': 'DAQmxSetTimingAttributeEx', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'deviceNames', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetTimingAttributeExDouble': { 'cname': 'DAQmxSetTimingAttributeEx', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'deviceNames', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetTimingAttributeExInt32': { 'cname': 'DAQmxSetTimingAttributeEx', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'deviceNames', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetTimingAttributeExString': { 'cname': 'DAQmxSetTimingAttributeEx', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'deviceNames', 'type': 'const 
char[]' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'const char[]' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetTimingAttributeExTimestamp': { 'cname': 'DAQmxSetTimingAttributeEx', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'deviceNames', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'CVIAbsoluteTime' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetTimingAttributeExUInt32': { 'cname': 'DAQmxSetTimingAttributeEx', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'deviceNames', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetTimingAttributeExUInt64': { 'cname': 'DAQmxSetTimingAttributeEx', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'deviceNames', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'uInt64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetTimingAttributeInt32': { 'cname': 'DAQmxSetTimingAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 
'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetTimingAttributeString': { 'cname': 'DAQmxSetTimingAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'const char[]' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetTimingAttributeTimestamp': { 'cname': 'DAQmxSetTimingAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'CVIAbsoluteTime' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetTimingAttributeUInt32': { 'cname': 'DAQmxSetTimingAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetTimingAttributeUInt64': { 'cname': 'DAQmxSetTimingAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TimingAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'uInt64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 
'SetTrigAttributeBool': { 'cname': 'DAQmxSetTrigAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TriggerAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetTrigAttributeDouble': { 'cname': 'DAQmxSetTrigAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TriggerAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetTrigAttributeDoubleArray': { 'cname': 'DAQmxSetTrigAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TriggerAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'size': { 'mechanism': 'len', 'value': 'size' }, 'type': 'const float64[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetTrigAttributeInt32': { 'cname': 'DAQmxSetTrigAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TriggerAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetTrigAttributeInt32Array': { 'cname': 'DAQmxSetTrigAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TriggerAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'size': { 
'mechanism': 'len', 'value': 'size' }, 'type': 'const int32[]' }, { 'direction': 'in', 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetTrigAttributeString': { 'cname': 'DAQmxSetTrigAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TriggerAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'const char[]' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetTrigAttributeTimestamp': { 'cname': 'DAQmxSetTrigAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TriggerAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'CVIAbsoluteTime' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetTrigAttributeUInt32': { 'cname': 'DAQmxSetTrigAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'TriggerAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetWatchdogAttributeBool': { 'cname': 'DAQmxSetWatchdogAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'lines', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'WatchdogAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetWatchdogAttributeDouble': { 'cname': 
'DAQmxSetWatchdogAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'lines', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'WatchdogAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetWatchdogAttributeInt32': { 'cname': 'DAQmxSetWatchdogAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'lines', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'WatchdogAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetWatchdogAttributeString': { 'cname': 'DAQmxSetWatchdogAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'lines', 'type': 'const char[]' }, { 'direction': 'in', 'grpc_type': 'WatchdogAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'const char[]' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetWriteAttributeBool': { 'cname': 'DAQmxSetWriteAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'WriteAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'bool32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetWriteAttributeDouble': { 'cname': 'DAQmxSetWriteAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 
'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'WriteAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetWriteAttributeInt32': { 'cname': 'DAQmxSetWriteAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'WriteAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetWriteAttributeString': { 'cname': 'DAQmxSetWriteAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'WriteAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'const char[]' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetWriteAttributeUInt32': { 'cname': 'DAQmxSetWriteAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'WriteAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 'returns': 'int32' }, 'SetWriteAttributeUInt64': { 'cname': 'DAQmxSetWriteAttribute', 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'grpc_type': 'WriteAttributes', 'name': 'attribute', 'type': 'int32' }, { 'direction': 'in', 'name': 'value', 'type': 'uInt64' }, { 'direction': 'in', 'hardcoded_value': '0U', 'include_in_proto': False, 'name': 'size', 'type': 'uInt32' } ], 
'returns': 'int32' }, 'StartNewFile': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'filePath', 'type': 'const char[]' } ], 'returns': 'int32' }, 'StartTask': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' } ], 'returns': 'int32' }, 'StopTask': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' } ], 'returns': 'int32' }, 'TaskControl': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'enum': 'TaskControlAction', 'name': 'action', 'type': 'int32' } ], 'returns': 'int32' }, 'TristateOutputTerm': { 'parameters': [ { 'direction': 'in', 'name': 'outputTerminal', 'type': 'const char[]' } ], 'returns': 'int32' }, 'UnreserveNetworkDevice': { 'parameters': [ { 'direction': 'in', 'name': 'deviceName', 'type': 'const char[]' } ], 'returns': 'int32' }, 'WaitForNextSampleClock': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'out', 'name': 'isLate', 'type': 'bool32' } ], 'returns': 'int32' }, 'WaitForValidTimestamp': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'enum': 'TimestampEvent', 'name': 'timestampEvent', 'type': 'int32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'out', 'name': 'timestamp', 'type': 'CVIAbsoluteTime' } ], 'returns': 'int32' }, 'WaitUntilTaskDone': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'timeToWait', 'type': 'float64' } ], 'returns': 'int32' }, 'WriteAnalogF64': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'autoStart', 'type': 'bool32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 
'direction': 'in', 'enum': 'GroupBy', 'name': 'dataLayout', 'type': 'int32' }, { 'direction': 'in', 'name': 'writeArray', 'type': 'const float64[]' }, { 'direction': 'out', 'name': 'sampsPerChanWritten', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'WriteAnalogScalarF64': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'autoStart', 'type': 'bool32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'name': 'value', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'WriteBinaryI16': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'autoStart', 'type': 'bool32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'dataLayout', 'type': 'int32' }, { 'coerced': True, 'direction': 'in', 'name': 'writeArray', 'type': 'const int16[]' }, { 'direction': 'out', 'name': 'sampsPerChanWritten', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'WriteBinaryI32': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'autoStart', 'type': 'bool32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'dataLayout', 'type': 'int32' }, { 'direction': 'in', 'name': 'writeArray', 'type': 'const int32[]' }, { 'direction': 'out', 'name': 
'sampsPerChanWritten', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'WriteBinaryU16': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'autoStart', 'type': 'bool32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'dataLayout', 'type': 'int32' }, { 'coerced': True, 'direction': 'in', 'name': 'writeArray', 'type': 'const uInt16[]' }, { 'direction': 'out', 'name': 'sampsPerChanWritten', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'WriteBinaryU32': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'autoStart', 'type': 'bool32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'dataLayout', 'type': 'int32' }, { 'direction': 'in', 'name': 'writeArray', 'type': 'const uInt32[]' }, { 'direction': 'out', 'name': 'sampsPerChanWritten', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'WriteCtrFreq': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'autoStart', 'type': 'bool32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'dataLayout', 'type': 'int32' }, { 'direction': 'in', 'name': 'frequency', 'type': 'const float64[]' }, { 
'direction': 'in', 'name': 'dutyCycle', 'type': 'const float64[]' }, { 'direction': 'out', 'name': 'numSampsPerChanWritten', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'WriteCtrFreqScalar': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'autoStart', 'type': 'bool32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'name': 'frequency', 'type': 'float64' }, { 'direction': 'in', 'name': 'dutyCycle', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'WriteCtrTicks': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'autoStart', 'type': 'bool32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'dataLayout', 'type': 'int32' }, { 'direction': 'in', 'name': 'highTicks', 'type': 'const uInt32[]' }, { 'direction': 'in', 'name': 'lowTicks', 'type': 'const uInt32[]' }, { 'direction': 'out', 'name': 'numSampsPerChanWritten', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'WriteCtrTicksScalar': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'autoStart', 'type': 'bool32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'name': 'highTicks', 'type': 'uInt32' }, { 'direction': 'in', 'name': 'lowTicks', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 
'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'WriteCtrTime': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'autoStart', 'type': 'bool32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'dataLayout', 'type': 'int32' }, { 'direction': 'in', 'name': 'highTime', 'type': 'const float64[]' }, { 'direction': 'in', 'name': 'lowTime', 'type': 'const float64[]' }, { 'direction': 'out', 'name': 'numSampsPerChanWritten', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'WriteCtrTimeScalar': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'autoStart', 'type': 'bool32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'name': 'highTime', 'type': 'float64' }, { 'direction': 'in', 'name': 'lowTime', 'type': 'float64' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'WriteDigitalLines': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'autoStart', 'type': 'bool32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'dataLayout', 'type': 'int32' }, { 'direction': 'in', 'name': 'writeArray', 'type': 'const uInt8[]' }, { 'direction': 'out', 'name': 'sampsPerChanWritten', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 
'WriteDigitalScalarU32': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'autoStart', 'type': 'bool32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'name': 'value', 'type': 'uInt32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'WriteDigitalU16': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'autoStart', 'type': 'bool32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'dataLayout', 'type': 'int32' }, { 'coerced': True, 'direction': 'in', 'name': 'writeArray', 'type': 'const uInt16[]' }, { 'direction': 'out', 'name': 'sampsPerChanWritten', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'WriteDigitalU32': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 'autoStart', 'type': 'bool32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'dataLayout', 'type': 'int32' }, { 'direction': 'in', 'name': 'writeArray', 'type': 'const uInt32[]' }, { 'direction': 'out', 'name': 'sampsPerChanWritten', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'WriteDigitalU8': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSampsPerChan', 'type': 'int32' }, { 'direction': 'in', 'name': 
'autoStart', 'type': 'bool32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'enum': 'GroupBy', 'name': 'dataLayout', 'type': 'int32' }, { 'direction': 'in', 'name': 'writeArray', 'type': 'const uInt8[]' }, { 'direction': 'out', 'name': 'sampsPerChanWritten', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'WriteRaw': { 'parameters': [ { 'direction': 'in', 'name': 'task', 'type': 'TaskHandle' }, { 'direction': 'in', 'name': 'numSamps', 'type': 'int32' }, { 'direction': 'in', 'name': 'autoStart', 'type': 'bool32' }, { 'direction': 'in', 'name': 'timeout', 'type': 'float64' }, { 'direction': 'in', 'name': 'writeArray', 'type': 'const uInt8[]' }, { 'direction': 'out', 'name': 'sampsPerChanWritten', 'type': 'int32' }, { 'direction': 'in', 'hardcoded_value': 'nullptr', 'include_in_proto': False, 'name': 'reserved', 'pointer': True, 'type': 'bool32' } ], 'returns': 'int32' }, 'WriteToTEDSFromArray': { 'parameters': [ { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'bitStream', 'size': { 'mechanism': 'len', 'value': 'arraySize' }, 'type': 'const uInt8[]' }, { 'direction': 'in', 'name': 'arraySize', 'type': 'uInt32' }, { 'direction': 'in', 'enum': 'WriteBasicTEDSOptions', 'name': 'basicTEDSOptions', 'type': 'int32' } ], 'returns': 'int32' }, 'WriteToTEDSFromFile': { 'parameters': [ { 'direction': 'in', 'name': 'physicalChannel', 'type': 'const char[]' }, { 'direction': 'in', 'name': 'filePath', 'type': 'const char[]' }, { 'direction': 'in', 'enum': 'WriteBasicTEDSOptions', 'name': 'basicTEDSOptions', 'type': 'int32' } ], 'returns': 'int32' } }
27.675984
97
0.30702
17,984
383,174
6.498109
0.044929
0.178561
0.162756
0.067772
0.904409
0.8977
0.890597
0.886695
0.88139
0.873757
0
0.022157
0.545585
383,174
13,844
98
27.677983
0.649001
0
0
0.624675
0
0
0.307445
0.037557
0
0
0
0
0
1
0
false
0.001445
0.000072
0
0.000072
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
f6b0b8d3885f1d7741a1cd2ea828454d0de94cbe
6,745
py
Python
mayan/apps/documents/tests/test_document_page_views.py
darrenflexxu/Mayan-EDMS
6707365bfacd137e625ddc1b990168012246fa07
[ "Apache-2.0" ]
null
null
null
mayan/apps/documents/tests/test_document_page_views.py
darrenflexxu/Mayan-EDMS
6707365bfacd137e625ddc1b990168012246fa07
[ "Apache-2.0" ]
5
2021-03-19T22:59:52.000Z
2022-03-12T00:13:16.000Z
mayan/apps/documents/tests/test_document_page_views.py
Sumit-Kumar-Jha/mayan
5b7ddeccf080b9e41cc1074c70e27dfe447be19f
[ "Apache-2.0" ]
1
2020-07-29T21:03:27.000Z
2020-07-29T21:03:27.000Z
from __future__ import unicode_literals from django.utils.encoding import force_text from ..permissions import ( permission_document_edit, permission_document_view ) from .base import GenericDocumentViewTestCase class DocumentPageDisableViewTestMixin(object): def _disable_test_document_page(self): self.test_document_page.enabled = False self.test_document_page.save() def _request_test_document_page_disable_view(self): return self.post( viewname='documents:document_page_disable', kwargs={ 'pk': self.test_document_page.pk } ) def _request_test_document_page_enable_view(self): return self.post( viewname='documents:document_page_enable', kwargs={ 'pk': self.test_document_page.pk } ) def _request_test_document_page_multiple_disable_view(self): return self.post( viewname='documents:document_page_multiple_disable', data={ 'id_list': self.test_document_page.pk } ) def _request_test_document_page_multiple_enable_view(self): return self.post( viewname='documents:document_page_multiple_enable', data={ 'id_list': self.test_document_page.pk } ) class DocumentPageDisableViewTestCase( DocumentPageDisableViewTestMixin, GenericDocumentViewTestCase ): def test_document_page_disable_view_no_permission(self): test_document_page_count = self.test_document.pages.count() response = self._request_test_document_page_disable_view() self.assertEqual(response.status_code, 404) self.assertEqual( test_document_page_count, self.test_document.pages.count() ) def test_document_page_disable_view_with_access(self): self.grant_access( obj=self.test_document, permission=permission_document_edit ) test_document_page_count = self.test_document.pages.count() response = self._request_test_document_page_disable_view() self.assertEqual(response.status_code, 302) self.assertNotEqual( test_document_page_count, self.test_document.pages.count() ) def test_document_page_multiple_disable_view_no_permission(self): test_document_page_count = self.test_document.pages.count() response = 
self._request_test_document_page_multiple_disable_view() self.assertEqual(response.status_code, 404) self.assertEqual( test_document_page_count, self.test_document.pages.count() ) def test_document_page_multiple_disable_view_with_access(self): self.grant_access( obj=self.test_document, permission=permission_document_edit ) test_document_page_count = self.test_document.pages.count() response = self._request_test_document_page_multiple_disable_view() self.assertEqual(response.status_code, 302) self.assertNotEqual( test_document_page_count, self.test_document.pages.count() ) def test_document_page_enable_view_no_permission(self): self._disable_test_document_page() test_document_page_count = self.test_document.pages.count() response = self._request_test_document_page_enable_view() self.assertEqual(response.status_code, 404) self.assertEqual( test_document_page_count, self.test_document.pages.count() ) def test_document_page_enable_view_with_access(self): self._disable_test_document_page() self.grant_access( obj=self.test_document, permission=permission_document_edit ) test_document_page_count = self.test_document.pages.count() response = self._request_test_document_page_enable_view() self.assertEqual(response.status_code, 302) self.assertNotEqual( test_document_page_count, self.test_document.pages.count() ) def test_document_page_multiple_enable_view_no_permission(self): self._disable_test_document_page() test_document_page_count = self.test_document.pages.count() response = self._request_test_document_page_multiple_enable_view() self.assertEqual(response.status_code, 404) self.assertEqual( test_document_page_count, self.test_document.pages.count() ) def test_document_page_multiple_enable_view_with_access(self): self._disable_test_document_page() self.grant_access( obj=self.test_document, permission=permission_document_edit ) test_document_page_count = self.test_document.pages.count() response = self._request_test_document_page_multiple_enable_view() 
self.assertEqual(response.status_code, 302) self.assertNotEqual( test_document_page_count, self.test_document.pages.count() ) class DocumentPageViewTestMixin(object): def _request_test_document_page_list_view(self): return self.get( viewname='documents:document_pages', kwargs={ 'pk': self.test_document.pk } ) def _request_test_document_page_view(self, document_page): return self.get( viewname='documents:document_page_view', kwargs={ 'pk': document_page.pk, } ) class DocumentPageViewTestCase( DocumentPageViewTestMixin, GenericDocumentViewTestCase ): def test_document_page_list_view_no_permission(self): response = self._request_test_document_page_list_view() self.assertEqual(response.status_code, 404) def test_document_page_list_view_with_access(self): self.grant_access( obj=self.test_document, permission=permission_document_view ) response = self._request_test_document_page_list_view() self.assertContains( response=response, text=self.test_document.label, status_code=200 ) def test_document_page_view_no_permissions(self): response = self._request_test_document_page_view( document_page=self.test_document.pages.first() ) self.assertEqual(response.status_code, 404) def test_document_page_view_with_access(self): self.grant_access( obj=self.test_document, permission=permission_document_view ) response = self._request_test_document_page_view( document_page=self.test_document.pages.first() ) self.assertContains( response=response, text=force_text( self.test_document.pages.first() ), status_code=200 )
33.226601
77
0.696219
752
6,745
5.776596
0.082447
0.232044
0.209945
0.091851
0.86349
0.810083
0.774632
0.759438
0.74977
0.737109
0
0.006939
0.230838
6,745
202
78
33.391089
0.830378
0
0
0.509804
0
0
0.031727
0.028466
0
0
0
0
0.130719
1
0.124183
false
0
0.026144
0.039216
0.215686
0
0
0
0
null
1
1
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
f6d5aa30a9b3e17cf907bda542a98685805e92d8
2,660
py
Python
server/apps/accounts/api/serializers.py
supercooledcreations/djangular-seed
6f73c7d91cd510ff03548a578a06730b4c351274
[ "MIT" ]
null
null
null
server/apps/accounts/api/serializers.py
supercooledcreations/djangular-seed
6f73c7d91cd510ff03548a578a06730b4c351274
[ "MIT" ]
2
2020-06-05T18:23:13.000Z
2021-06-10T20:30:24.000Z
server/apps/accounts/api/serializers.py
supercooledcreations/djangular-seed
6f73c7d91cd510ff03548a578a06730b4c351274
[ "MIT" ]
null
null
null
from django.contrib.auth import get_user_model from rest_framework import serializers from rest_framework.reverse import reverse as api_reverse User = get_user_model() class StandardUserRegisterSerializer(serializers.ModelSerializer): password = serializers.CharField(style={'input_type': 'password'}, write_only=True) confirm_password = serializers.CharField(style={'input_type': 'password'}, write_only=True) class Meta: model = User fields = ['username', 'email', 'password', 'confirm_password'] extra_kwargs = {'password': {'write_only': True}} def validate_username(self, value): qs = User.objects.filter(username__iexact=value) if qs.exists(): raise serializers.ValidationError("User with this username address already exists") return value def validate_email(self, value): qs = User.objects.filter(email__iexact=value) if qs.exists(): raise serializers.ValidationError("User with this email address already exists") return value def validate(self, data): pw = data.get("password") pw2 = data.get("confirm_password") if pw != pw2: raise serializers.ValidationError("Passwords must match") return data def create(self, validated_data): user_obj = User( username=validated_data.get('username'), email=validated_data.get('email'), ) user_obj.set_password(validated_data.get('password')) user_obj.save() return user_obj class EmailUserRegisterSerializer(serializers.ModelSerializer): password = serializers.CharField(style={'input_type': 'password'}, write_only=True) confirm_password = serializers.CharField(style={'input_type': 'password'}, write_only=True) class Meta: model = User fields = ['email', 'password', 'confirm_password'] extra_kwargs = {'password': {'write_only': True}} def validate_email(self, value): qs = User.objects.filter(email__iexact=value) if qs.exists(): raise serializers.ValidationError("User with this email address already exists") return value def validate(self, data): pw = data.get("password") pw2 = data.get("confirm_password") if pw != pw2: raise 
serializers.ValidationError("Passwords must match") return data def create(self, validated_data): user_obj = User( email=validated_data.get('email'), ) user_obj.set_password(validated_data.get('password')) user_obj.save() return user_obj
33.670886
95
0.657895
295
2,660
5.759322
0.20678
0.037081
0.060035
0.074161
0.841672
0.841672
0.825191
0.806945
0.806945
0.806945
0
0.001968
0.235714
2,660
79
96
33.670886
0.833743
0
0
0.766667
0
0
0.160842
0
0
0
0
0
0
1
0.116667
false
0.266667
0.05
0
0.416667
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
8
f6ff28b8c04d48d6511a0e130ad6dd82f1a49425
9,864
py
Python
theano/tests/test_gradient.py
jsalvatier/Theano-1
457bae18a0d3284841b9bcae5ce4ceee22f33132
[ "BSD-3-Clause" ]
1
2015-11-05T13:58:11.000Z
2015-11-05T13:58:11.000Z
theano/tests/test_gradient.py
jsalvatier/Theano-1
457bae18a0d3284841b9bcae5ce4ceee22f33132
[ "BSD-3-Clause" ]
null
null
null
theano/tests/test_gradient.py
jsalvatier/Theano-1
457bae18a0d3284841b9bcae5ce4ceee22f33132
[ "BSD-3-Clause" ]
null
null
null
# # UNIT TEST # import unittest import numpy import theano from theano import gof from theano.gradient import * from theano import gradient def _grad_sources_inputs(*args): # warn_type was introduced after this code, it complains throughout for nothing. return grad_sources_inputs(warn_type=False, *args) class test_grad_sources_inputs(unittest.TestCase): def test_retNone1(self): """Test that it is not ok to return None from op.grad()""" class retNone(gof.op.Op): def make_node(self): inputs = [gof.generic()] outputs = [gof.generic()] return gof.Apply(self, inputs, outputs) def grad(self, inp, grads): x, = inp gz, = grads pass a = retNone().make_node() try: _grad_sources_inputs([(a.out, 1)], None) except ValueError, e: self.assertTrue(e[0] is gradient._msg_retType) return self.fail() def test_retNone1_b(self): """Test that it is ok to return [None] from op.grad()""" class retNone(gof.op.Op): def make_node(self, *inputs): outputs = [gof.generic()] return gof.Apply(self, inputs, outputs) def grad(self, inp, grads): return [None] i = gof.generic() a = retNone().make_node(i) g = _grad_sources_inputs([(a.out, 1)], None) self.assertTrue(not i in g) def test_wrong_rval_len1(self): """Test that it is not ok to return the wrong number of gradients""" class retNone(gof.op.Op): def make_node(self, *inputs): outputs = [gof.generic()] return gof.Apply(self, inputs, outputs) def grad(self, inputs, grads): return [None] i = gof.generic() j = gof.generic() a1 = retNone().make_node(i) g = _grad_sources_inputs([(a1.out, 1)], None) a2 = retNone().make_node(i,j) try: g = _grad_sources_inputs([(a2.out, 1)], None) except ValueError, e: self.assertTrue(e[0] is gradient._msg_badlen) return self.fail() def test_stop_on_all_none(self): """Test that op.grad() is not called when output grads are all None""" class retNone(gof.op.Op): def __init__(self, tst): self.tst = tst def make_node(self, *inputs): outputs = [gof.generic()] return gof.Apply(self, inputs, outputs) def grad(self, inputs, 
grads): self.tst.fail() i = gof.generic() a1 = retNone(self).make_node(i) g = _grad_sources_inputs([(a1.out, None)], None) def test_1in_1out(self): """Test grad is called correctly for a 1-to-1 op""" gval = gof.generic() class O(gof.op.Op): def make_node(self): inputs = [gof.generic()] outputs = [gof.generic()] return gof.Apply(self, inputs, outputs) def grad(self, inp, grads): return gval, a1 = O().make_node() g = _grad_sources_inputs([(a1.outputs[0], 1)], None) self.assertTrue(g[a1.inputs[0]] is gval) def test_1in_Nout(self): """Test grad is called correctly for a 1-to-many op""" gval = gof.generic() class O(gof.op.Op): def make_node(self): inputs = [gof.generic()] outputs = [gof.generic(),gof.generic()] return gof.Apply(self, inputs, outputs) def grad(self, inp, grads): x, = inp gz1, gz2 = grads return gval, a1 = O().make_node() g = _grad_sources_inputs([(a1.outputs[0], 1)], None) self.assertTrue(g[a1.inputs[0]] is gval) def test_Nin_1out(self): """Test grad is called correctly for a many-to-1 op""" gval0 = gof.generic() gval1 = gof.generic() class O(gof.op.Op): def make_node(self): inputs = [gof.generic(),gof.generic()] outputs = [gof.generic()] return gof.Apply(self, inputs, outputs) def grad(self, inp, grads): x0, x1 = inp gz, = grads return (gval0, gval1) a1 = O().make_node() g = _grad_sources_inputs([(a1.outputs[0], 1)], None) self.assertTrue(g[a1.inputs[0]] is gval0) self.assertTrue(g[a1.inputs[1]] is gval1) def test_Nin_Nout(self): """Test grad is called correctly for a many-to-many op""" gval0 = gof.generic() gval1 = gof.generic() class O(gof.op.Op): def make_node(self): inputs = [gof.generic(),gof.generic()] outputs = [gof.generic(),gof.generic()] return gof.Apply(self, inputs, outputs) def grad(self, inp, grads): return gval0, gval1 a1 = O().make_node() g = _grad_sources_inputs([(a1.outputs[0], 1)], None) self.assertTrue(g[a1.inputs[0]] is gval0) self.assertTrue(g[a1.inputs[1]] is gval1) def test_some_None_ograds(self): """Test grad is called when some 
output gradients are None""" class O(gof.op.Op): def __init__(self, tst): self.tst = tst def make_node(self, *inputs): outputs = [gof.generic(),gof.generic()] return gof.Apply(self, inputs, outputs) def grad(self, inputs, g_out): return [1] i = gof.generic() a1 = O(self).make_node(i) g = grad_sources_inputs([(a1.outputs[0], 1)], None, warn_type=False) self.assertTrue(g[i] is 1) def test_some_None_igrads(self): """Test that traversal works properly when an op return some None""" class O(gof.op.Op): def __init__(self, tst, grad_ok): self.tst = tst self.grad_ok = grad_ok def make_node(self, *inputs): outputs = [gof.generic(),gof.generic()] return gof.Apply(self, inputs, outputs) def grad(self, inputs, g_out): if not self.grad_ok: self.tst.fail() else: return [1, None] i = gof.generic() j = gof.generic() k = gof.generic() a1 = O(self, True).make_node(i,j) a2 = O(self, True).make_node(a1.outputs[1], k) g = grad_sources_inputs([(a2.outputs[0], 1)], None, warn_type=False) self.assertTrue(g[i] is 1 and j not in g and k not in g) a1 = O(self, True).make_node(i,j) a2 = O(self, True).make_node(k, a1.outputs[1]) g = _grad_sources_inputs([(a2.outputs[0], 1)], None) self.assertTrue(g[k] is 1 and i not in g and j not in g) def test_inputs(self): """Test that passing inputs shortens the traversal""" class O(gof.op.Op): def __init__(self, tst, grad_ok): self.tst = tst self.grad_ok = grad_ok def make_node(self, *inputs): outputs = [gof.generic(),gof.generic()] return gof.Apply(self, inputs, outputs) def grad(self, inputs, grads): g0, g1 = grads if not self.grad_ok: self.tst.fail() else: if g1: return [g0, g0+g1] else: return [g0, g0] i = gof.generic() j = gof.generic() k = gof.generic() a1 = O(self, True).make_node(i,j) a2 = O(self, True).make_node(k,a1.outputs[1]) g = _grad_sources_inputs([(a2.outputs[0], 1), (a1.outputs[1],4), (a1.outputs[0], 3), (a1.outputs[0], 3)], a1.outputs) self.assertTrue(g[a2.inputs[0]] == 1) self.assertTrue(g[a2.inputs[1]] == 5) 
self.assertTrue(g[a1.outputs[0]] == 6) self.assertTrue(g[a1.outputs[1]] == 5) self.assertTrue(a1.inputs[0] not in g) self.assertTrue(a1.inputs[1] not in g) def test_multiple_sources(self): """Test that passing multiple sources works""" class O(gof.op.Op): def __init__(self, tst, grad_ok): self.tst = tst self.grad_ok = grad_ok def make_node(self, *inputs): outputs = [gof.generic(),gof.generic()] return gof.Apply(self, inputs, outputs) def grad(self, inputs, grads): g0, g1 = grads if not self.grad_ok: self.tst.fail() else: if g1: return [g0, g0+g1] else: return [g0, g0] i = gof.generic() j = gof.generic() k = gof.generic() a1 = O(self,True).make_node(i,j) a2 = O(self,True).make_node(k,a1.outputs[1]) g = _grad_sources_inputs([(a2.outputs[0], 1), (a1.outputs[1],4), (a1.outputs[0], 3), (a1.outputs[0], 3)], None) self.assertTrue(g[a2.inputs[0]] == 1) self.assertTrue(g[a2.inputs[1]] == 5) self.assertTrue(g[a1.outputs[0]] == 6) self.assertTrue(g[a1.outputs[1]] == 5) self.assertTrue(g[a1.inputs[0]] == 6) self.assertTrue(g[a1.inputs[1]] == 11) def test_unimplemented_grad(): a = theano.tensor.vector() b = theano.gradient.unimplemented_grad(theano.tensor.add, 1, a) f = theano.function([a], b) try: f([1,2,3]) assert 0 except NotImplementedError: pass if __name__ == '__main__': unittest.main()
37.082707
84
0.52058
1,281
9,864
3.885246
0.103825
0.090416
0.064899
0.047016
0.7826
0.75226
0.736789
0.722323
0.714085
0.670082
0
0.027869
0.348844
9,864
265
85
37.222642
0.747003
0.008921
0
0.727273
0
0
0.000882
0
0
0
0
0
0.108225
0
null
null
0.008658
0.025974
null
null
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
7
123476c42d756f3312035558cc4082254cab0c8a
68,628
py
Python
benchmarks/SimResults/Paper2_pinned_spec_ml/cmp_gamessmcfmilczeusmp/power.py
TugberkArkose/MLScheduler
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
[ "Unlicense" ]
null
null
null
benchmarks/SimResults/Paper2_pinned_spec_ml/cmp_gamessmcfmilczeusmp/power.py
TugberkArkose/MLScheduler
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
[ "Unlicense" ]
null
null
null
benchmarks/SimResults/Paper2_pinned_spec_ml/cmp_gamessmcfmilczeusmp/power.py
TugberkArkose/MLScheduler
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
[ "Unlicense" ]
null
null
null
power = {'BUSES': {'Area': 1.33155, 'Bus/Area': 1.33155, 'Bus/Gate Leakage': 0.00662954, 'Bus/Peak Dynamic': 0.0, 'Bus/Runtime Dynamic': 0.0, 'Bus/Subthreshold Leakage': 0.0691322, 'Bus/Subthreshold Leakage with power gating': 0.0259246, 'Gate Leakage': 0.00662954, 'Peak Dynamic': 0.0, 'Runtime Dynamic': 0.0, 'Subthreshold Leakage': 0.0691322, 'Subthreshold Leakage with power gating': 0.0259246}, 'Core': [{'Area': 32.6082, 'Execution Unit/Area': 8.2042, 'Execution Unit/Complex ALUs/Area': 0.235435, 'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646, 'Execution Unit/Complex ALUs/Peak Dynamic': 0.0771141, 'Execution Unit/Complex ALUs/Runtime Dynamic': 0.263257, 'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111, 'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163, 'Execution Unit/Floating Point Units/Area': 4.6585, 'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156, 'Execution Unit/Floating Point Units/Peak Dynamic': 0.380219, 'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033, 'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829, 'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061, 'Execution Unit/Gate Leakage': 0.122718, 'Execution Unit/Instruction Scheduler/Area': 2.17927, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.208186, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066, 'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101, 'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996, 'Execution 
Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112, 'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911, 'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.360502, 'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117, 'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351, 'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781, 'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232, 'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399, 'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892, 'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.206758, 'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624, 'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339, 'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.775446, 'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878, 'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291, 'Execution Unit/Integer ALUs/Area': 0.47087, 'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291, 'Execution Unit/Integer ALUs/Peak Dynamic': 0.147491, 'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344, 'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222, 'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833, 'Execution Unit/Peak Dynamic': 5.85579, 'Execution Unit/Register Files/Area': 0.570804, 'Execution Unit/Register Files/Floating Point RF/Area': 0.208131, 'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788, 'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0718315, 'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00754689, 'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698, 'Execution 
Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968, 'Execution Unit/Register Files/Gate Leakage': 0.000622708, 'Execution Unit/Register Files/Integer RF/Area': 0.362673, 'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992, 'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0849168, 'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0558139, 'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175, 'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675, 'Execution Unit/Register Files/Peak Dynamic': 0.156748, 'Execution Unit/Register Files/Runtime Dynamic': 0.0633608, 'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387, 'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643, 'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632, 'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074, 'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.22632, 'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.490034, 'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413, 'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155, 'Execution Unit/Runtime Dynamic': 1.99747, 'Execution Unit/Subthreshold Leakage': 1.83518, 'Execution Unit/Subthreshold Leakage with power gating': 0.709678, 'Gate Leakage': 0.372997, 'Instruction Fetch Unit/Area': 5.86007, 'Instruction Fetch Unit/Branch Predictor/Area': 0.138516, 'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221, 'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362, 'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831, 'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00047789, 'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719, 'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold 
Leakage with power gating': 0.0039236, 'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00047789, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000414961, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000159938, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045, 'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838, 'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732, 'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05, 'Instruction Fetch Unit/Branch 
Predictor/RAS/Peak Dynamic': 0.0117602, 'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000801771, 'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505, 'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733, 'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00217251, 'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703, 'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282, 'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954, 'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758, 'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867, 'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0046277, 'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682, 'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357, 'Instruction Fetch Unit/Gate Leakage': 0.0590479, 'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323, 'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05, 'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827, 'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0536553, 'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885, 'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682, 'Instruction Fetch Unit/Instruction Cache/Area': 3.14635, 'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931, 'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.41294, 'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.110385, 'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022, 'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386, 'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799, 'Instruction Fetch 
Unit/Instruction Decoder/Gate Leakage': 0.0222493, 'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404, 'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.182238, 'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943, 'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104, 'Instruction Fetch Unit/Peak Dynamic': 5.79963, 'Instruction Fetch Unit/Runtime Dynamic': 0.353079, 'Instruction Fetch Unit/Subthreshold Leakage': 0.932587, 'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542, 'L2/Area': 4.53318, 'L2/Gate Leakage': 0.015464, 'L2/Peak Dynamic': 0.0618757, 'L2/Runtime Dynamic': 0.0178262, 'L2/Subthreshold Leakage': 0.834142, 'L2/Subthreshold Leakage with power gating': 0.401066, 'Load Store Unit/Area': 8.80969, 'Load Store Unit/Data Cache/Area': 6.84535, 'Load Store Unit/Data Cache/Gate Leakage': 0.0279261, 'Load Store Unit/Data Cache/Peak Dynamic': 2.96425, 'Load Store Unit/Data Cache/Runtime Dynamic': 0.857433, 'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675, 'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085, 'Load Store Unit/Gate Leakage': 0.0351387, 'Load Store Unit/LoadQ/Area': 0.0836782, 'Load Store Unit/LoadQ/Gate Leakage': 0.00059896, 'Load Store Unit/LoadQ/Peak Dynamic': 0.0558767, 'Load Store Unit/LoadQ/Runtime Dynamic': 0.0558767, 'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961, 'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918, 'Load Store Unit/Peak Dynamic': 3.22918, 'Load Store Unit/Runtime Dynamic': 1.18887, 'Load Store Unit/StoreQ/Area': 0.322079, 'Load Store Unit/StoreQ/Gate Leakage': 0.00329971, 'Load Store Unit/StoreQ/Peak Dynamic': 0.137782, 'Load Store Unit/StoreQ/Runtime Dynamic': 0.275565, 'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621, 'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004, 'Load Store Unit/Subthreshold Leakage': 
0.591622, 'Load Store Unit/Subthreshold Leakage with power gating': 0.283406, 'Memory Management Unit/Area': 0.434579, 'Memory Management Unit/Dtlb/Area': 0.0879726, 'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729, 'Memory Management Unit/Dtlb/Peak Dynamic': 0.0488994, 'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0498258, 'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699, 'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485, 'Memory Management Unit/Gate Leakage': 0.00813591, 'Memory Management Unit/Itlb/Area': 0.301552, 'Memory Management Unit/Itlb/Gate Leakage': 0.00393464, 'Memory Management Unit/Itlb/Peak Dynamic': 0.212204, 'Memory Management Unit/Itlb/Runtime Dynamic': 0.0181048, 'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758, 'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842, 'Memory Management Unit/Peak Dynamic': 0.45524, 'Memory Management Unit/Runtime Dynamic': 0.0679306, 'Memory Management Unit/Subthreshold Leakage': 0.0769113, 'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462, 'Peak Dynamic': 19.9634, 'Renaming Unit/Area': 0.369768, 'Renaming Unit/FP Front End RAT/Area': 0.168486, 'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731, 'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511, 'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.250603, 'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281, 'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925, 'Renaming Unit/Free List/Area': 0.0414755, 'Renaming Unit/Free List/Gate Leakage': 4.15911e-05, 'Renaming Unit/Free List/Peak Dynamic': 0.0401324, 'Renaming Unit/Free List/Runtime Dynamic': 0.013661, 'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426, 'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987, 'Renaming Unit/Gate Leakage': 0.00863632, 'Renaming Unit/Int Front End RAT/Area': 0.114751, 'Renaming 
Unit/Int Front End RAT/Gate Leakage': 0.00038343, 'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945, 'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.104664, 'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897, 'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781, 'Renaming Unit/Peak Dynamic': 4.56169, 'Renaming Unit/Runtime Dynamic': 0.368928, 'Renaming Unit/Subthreshold Leakage': 0.070483, 'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779, 'Runtime Dynamic': 3.99411, 'Subthreshold Leakage': 6.21877, 'Subthreshold Leakage with power gating': 2.58311}, {'Area': 32.0201, 'Execution Unit/Area': 7.68434, 'Execution Unit/Complex ALUs/Area': 0.235435, 'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646, 'Execution Unit/Complex ALUs/Peak Dynamic': 0.0760419, 'Execution Unit/Complex ALUs/Runtime Dynamic': 0.262415, 'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111, 'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163, 'Execution Unit/Floating Point Units/Area': 4.6585, 'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156, 'Execution Unit/Floating Point Units/Peak Dynamic': 0.357299, 'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033, 'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829, 'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061, 'Execution Unit/Gate Leakage': 0.120359, 'Execution Unit/Instruction Scheduler/Area': 1.66526, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.177874, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453, 'Execution Unit/Instruction 
Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519, 'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913, 'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223, 'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562, 'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763, 'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.286904, 'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755, 'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964, 'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262, 'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388, 'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608, 'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451, 'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.144819, 'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853, 'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446, 'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.609597, 'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892, 'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346, 'Execution Unit/Integer ALUs/Area': 0.47087, 'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291, 'Execution Unit/Integer ALUs/Peak Dynamic': 0.148657, 'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344, 'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222, 'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833, 'Execution Unit/Peak Dynamic': 4.77387, 'Execution Unit/Register Files/Area': 0.570804, 'Execution Unit/Register Files/Floating Point RF/Area': 0.208131, 'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788, 'Execution 
Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0675015, 'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00746082, 'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698, 'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968, 'Execution Unit/Register Files/Gate Leakage': 0.000622708, 'Execution Unit/Register Files/Integer RF/Area': 0.362673, 'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992, 'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0845891, 'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0551773, 'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175, 'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675, 'Execution Unit/Register Files/Peak Dynamic': 0.152091, 'Execution Unit/Register Files/Runtime Dynamic': 0.0626382, 'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387, 'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643, 'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912, 'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402, 'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.19722, 'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.438696, 'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478, 'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543, 'Execution Unit/Runtime Dynamic': 1.77872, 'Execution Unit/Subthreshold Leakage': 1.79543, 'Execution Unit/Subthreshold Leakage with power gating': 0.688821, 'Gate Leakage': 0.368936, 'Instruction Fetch Unit/Area': 5.85939, 'Instruction Fetch Unit/Branch Predictor/Area': 0.138516, 'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221, 'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362, 'Instruction Fetch Unit/Branch Predictor/Chooser/Peak 
Dynamic': 0.0168831, 'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000528599, 'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719, 'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236, 'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000528599, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000472514, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000189539, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045, 
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838, 'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732, 'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05, 'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602, 'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000792627, 'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505, 'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733, 'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00232234, 'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703, 'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282, 'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954, 'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758, 'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867, 'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00463566, 'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682, 'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357, 'Instruction Fetch Unit/Gate Leakage': 0.0589979, 'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323, 'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05, 'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827, 'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0530434, 'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885, 'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682, 'Instruction Fetch Unit/Instruction Cache/Area': 3.14635, 'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931, 'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.37402, 'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.116596, 'Instruction Fetch 
Unit/Instruction Cache/Subthreshold Leakage': 0.367022, 'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386, 'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799, 'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493, 'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404, 'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.180159, 'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943, 'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104, 'Instruction Fetch Unit/Peak Dynamic': 5.75628, 'Instruction Fetch Unit/Runtime Dynamic': 0.356757, 'Instruction Fetch Unit/Subthreshold Leakage': 0.932286, 'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843, 'L2/Area': 4.53318, 'L2/Gate Leakage': 0.015464, 'L2/Peak Dynamic': 0.0436713, 'L2/Runtime Dynamic': 0.0127922, 'L2/Subthreshold Leakage': 0.834142, 'L2/Subthreshold Leakage with power gating': 0.401066, 'Load Store Unit/Area': 8.80901, 'Load Store Unit/Data Cache/Area': 6.84535, 'Load Store Unit/Data Cache/Gate Leakage': 0.0279261, 'Load Store Unit/Data Cache/Peak Dynamic': 2.71091, 'Load Store Unit/Data Cache/Runtime Dynamic': 0.733094, 'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675, 'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085, 'Load Store Unit/Gate Leakage': 0.0350888, 'Load Store Unit/LoadQ/Area': 0.0836782, 'Load Store Unit/LoadQ/Gate Leakage': 0.00059896, 'Load Store Unit/LoadQ/Peak Dynamic': 0.0476805, 'Load Store Unit/LoadQ/Runtime Dynamic': 0.0476803, 'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961, 'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918, 'Load Store Unit/Peak Dynamic': 2.93606, 'Load Store Unit/Runtime Dynamic': 1.01592, 'Load Store Unit/StoreQ/Area': 0.322079, 'Load Store Unit/StoreQ/Gate Leakage': 0.00329971, 'Load Store Unit/StoreQ/Peak Dynamic': 0.117572, 
'Load Store Unit/StoreQ/Runtime Dynamic': 0.235143, 'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621, 'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004, 'Load Store Unit/Subthreshold Leakage': 0.591321, 'Load Store Unit/Subthreshold Leakage with power gating': 0.283293, 'Memory Management Unit/Area': 0.4339, 'Memory Management Unit/Dtlb/Area': 0.0879726, 'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729, 'Memory Management Unit/Dtlb/Peak Dynamic': 0.0417266, 'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0423696, 'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699, 'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485, 'Memory Management Unit/Gate Leakage': 0.00808595, 'Memory Management Unit/Itlb/Area': 0.301552, 'Memory Management Unit/Itlb/Gate Leakage': 0.00393464, 'Memory Management Unit/Itlb/Peak Dynamic': 0.209784, 'Memory Management Unit/Itlb/Runtime Dynamic': 0.0191523, 'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758, 'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842, 'Memory Management Unit/Peak Dynamic': 0.437572, 'Memory Management Unit/Runtime Dynamic': 0.0615219, 'Memory Management Unit/Subthreshold Leakage': 0.0766103, 'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333, 'Peak Dynamic': 17.5369, 'Renaming Unit/Area': 0.303608, 'Renaming Unit/FP Front End RAT/Area': 0.131045, 'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123, 'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468, 'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.177565, 'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571, 'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885, 'Renaming Unit/Free List/Area': 0.0340654, 'Renaming Unit/Free List/Gate Leakage': 2.5481e-05, 'Renaming Unit/Free List/Peak Dynamic': 0.0306032, 'Renaming Unit/Free List/Runtime Dynamic': 0.0101861, 'Renaming Unit/Free 
List/Subthreshold Leakage': 0.000370144, 'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064, 'Renaming Unit/Gate Leakage': 0.00708398, 'Renaming Unit/Int Front End RAT/Area': 0.0941223, 'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242, 'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965, 'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0881921, 'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488, 'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228, 'Renaming Unit/Peak Dynamic': 3.58947, 'Renaming Unit/Runtime Dynamic': 0.275944, 'Renaming Unit/Subthreshold Leakage': 0.0552466, 'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461, 'Runtime Dynamic': 3.50165, 'Subthreshold Leakage': 6.16288, 'Subthreshold Leakage with power gating': 2.55328}, {'Area': 32.0201, 'Execution Unit/Area': 7.68434, 'Execution Unit/Complex ALUs/Area': 0.235435, 'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646, 'Execution Unit/Complex ALUs/Peak Dynamic': 0.0852649, 'Execution Unit/Complex ALUs/Runtime Dynamic': 0.269659, 'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111, 'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163, 'Execution Unit/Floating Point Units/Area': 4.6585, 'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156, 'Execution Unit/Floating Point Units/Peak Dynamic': 0.38053, 'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033, 'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829, 'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061, 'Execution Unit/Gate Leakage': 0.120359, 'Execution Unit/Instruction Scheduler/Area': 1.66526, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 
1.04181, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.223312, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519, 'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913, 'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223, 'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562, 'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763, 'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.360195, 'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755, 'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964, 'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262, 'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388, 'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608, 'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451, 'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.181814, 'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853, 'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446, 'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.765321, 'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892, 'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346, 'Execution Unit/Integer ALUs/Area': 0.47087, 'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291, 'Execution Unit/Integer ALUs/Peak Dynamic': 0.197064, 'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344, 'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222, 'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833, 'Execution Unit/Peak Dynamic': 
4.92482, 'Execution Unit/Register Files/Area': 0.570804, 'Execution Unit/Register Files/Floating Point RF/Area': 0.208131, 'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788, 'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0718903, 'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00936673, 'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698, 'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968, 'Execution Unit/Register Files/Gate Leakage': 0.000622708, 'Execution Unit/Register Files/Integer RF/Area': 0.362673, 'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992, 'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.102905, 'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0692727, 'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175, 'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675, 'Execution Unit/Register Files/Peak Dynamic': 0.174795, 'Execution Unit/Register Files/Runtime Dynamic': 0.0786394, 'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387, 'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643, 'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912, 'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402, 'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.238112, 'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.547938, 'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478, 'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543, 'Execution Unit/Runtime Dynamic': 2.06694, 'Execution Unit/Subthreshold Leakage': 1.79543, 'Execution Unit/Subthreshold Leakage with power gating': 0.688821, 'Gate Leakage': 0.368936, 'Instruction Fetch Unit/Area': 5.85939, 'Instruction Fetch Unit/Branch Predictor/Area': 
0.138516, 'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221, 'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362, 'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831, 'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000750692, 'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719, 'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236, 'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000750692, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000670992, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000269126, 
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045, 'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838, 'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732, 'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05, 'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602, 'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000995108, 'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505, 'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733, 'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00316748, 'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703, 'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282, 'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954, 'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758, 'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867, 'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00658517, 'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682, 'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357, 'Instruction Fetch Unit/Gate Leakage': 0.0589979, 'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323, 'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05, 'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827, 'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0665936, 'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885, 'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682, 'Instruction Fetch Unit/Instruction Cache/Area': 3.14635, 'Instruction 
Fetch Unit/Instruction Cache/Gate Leakage': 0.029931, 'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.23592, 'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.155896, 'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022, 'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386, 'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799, 'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493, 'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404, 'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.226182, 'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943, 'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104, 'Instruction Fetch Unit/Peak Dynamic': 6.66002, 'Instruction Fetch Unit/Runtime Dynamic': 0.458424, 'Instruction Fetch Unit/Subthreshold Leakage': 0.932286, 'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843, 'L2/Area': 4.53318, 'L2/Gate Leakage': 0.015464, 'L2/Peak Dynamic': 0.0373351, 'L2/Runtime Dynamic': 0.00941809, 'L2/Subthreshold Leakage': 0.834142, 'L2/Subthreshold Leakage with power gating': 0.401066, 'Load Store Unit/Area': 8.80901, 'Load Store Unit/Data Cache/Area': 6.84535, 'Load Store Unit/Data Cache/Gate Leakage': 0.0279261, 'Load Store Unit/Data Cache/Peak Dynamic': 3.08612, 'Load Store Unit/Data Cache/Runtime Dynamic': 0.908779, 'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675, 'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085, 'Load Store Unit/Gate Leakage': 0.0350888, 'Load Store Unit/LoadQ/Area': 0.0836782, 'Load Store Unit/LoadQ/Gate Leakage': 0.00059896, 'Load Store Unit/LoadQ/Peak Dynamic': 0.0598194, 'Load Store Unit/LoadQ/Runtime Dynamic': 0.0598195, 'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961, 'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918, 'Load Store 
Unit/Peak Dynamic': 3.3686, 'Load Store Unit/Runtime Dynamic': 1.26361, 'Load Store Unit/StoreQ/Area': 0.322079, 'Load Store Unit/StoreQ/Gate Leakage': 0.00329971, 'Load Store Unit/StoreQ/Peak Dynamic': 0.147505, 'Load Store Unit/StoreQ/Runtime Dynamic': 0.29501, 'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621, 'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004, 'Load Store Unit/Subthreshold Leakage': 0.591321, 'Load Store Unit/Subthreshold Leakage with power gating': 0.283293, 'Memory Management Unit/Area': 0.4339, 'Memory Management Unit/Dtlb/Area': 0.0879726, 'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729, 'Memory Management Unit/Dtlb/Peak Dynamic': 0.0523498, 'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0528892, 'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699, 'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485, 'Memory Management Unit/Gate Leakage': 0.00808595, 'Memory Management Unit/Itlb/Area': 0.301552, 'Memory Management Unit/Itlb/Gate Leakage': 0.00393464, 'Memory Management Unit/Itlb/Peak Dynamic': 0.263374, 'Memory Management Unit/Itlb/Runtime Dynamic': 0.0256202, 'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758, 'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842, 'Memory Management Unit/Peak Dynamic': 0.509411, 'Memory Management Unit/Runtime Dynamic': 0.0785094, 'Memory Management Unit/Subthreshold Leakage': 0.0766103, 'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333, 'Peak Dynamic': 19.0897, 'Renaming Unit/Area': 0.303608, 'Renaming Unit/FP Front End RAT/Area': 0.131045, 'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123, 'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468, 'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.18911, 'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571, 'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885, 
'Renaming Unit/Free List/Area': 0.0340654, 'Renaming Unit/Free List/Gate Leakage': 2.5481e-05, 'Renaming Unit/Free List/Peak Dynamic': 0.0306032, 'Renaming Unit/Free List/Runtime Dynamic': 0.0123767, 'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144, 'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064, 'Renaming Unit/Gate Leakage': 0.00708398, 'Renaming Unit/Int Front End RAT/Area': 0.0941223, 'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242, 'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965, 'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.111772, 'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488, 'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228, 'Renaming Unit/Peak Dynamic': 3.58947, 'Renaming Unit/Runtime Dynamic': 0.313259, 'Renaming Unit/Subthreshold Leakage': 0.0552466, 'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461, 'Runtime Dynamic': 4.19015, 'Subthreshold Leakage': 6.16288, 'Subthreshold Leakage with power gating': 2.55328}, {'Area': 32.0201, 'Execution Unit/Area': 7.68434, 'Execution Unit/Complex ALUs/Area': 0.235435, 'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646, 'Execution Unit/Complex ALUs/Peak Dynamic': 0.079865, 'Execution Unit/Complex ALUs/Runtime Dynamic': 0.265418, 'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111, 'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163, 'Execution Unit/Floating Point Units/Area': 4.6585, 'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156, 'Execution Unit/Floating Point Units/Peak Dynamic': 0.379683, 'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033, 'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829, 'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061, 'Execution Unit/Gate Leakage': 0.120359, 'Execution Unit/Instruction Scheduler/Area': 1.66526, 'Execution 
Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.194488, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519, 'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913, 'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223, 'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562, 'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763, 'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.313702, 'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755, 'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964, 'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262, 'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388, 'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608, 'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451, 'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.158346, 'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853, 'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446, 'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.666537, 'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892, 'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346, 'Execution Unit/Integer ALUs/Area': 0.47087, 'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291, 'Execution Unit/Integer ALUs/Peak Dynamic': 0.164227, 
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344, 'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222, 'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833, 'Execution Unit/Peak Dynamic': 4.84281, 'Execution Unit/Register Files/Area': 0.570804, 'Execution Unit/Register Files/Floating Point RF/Area': 0.208131, 'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788, 'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0717302, 'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00815771, 'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698, 'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968, 'Execution Unit/Register Files/Gate Leakage': 0.000622708, 'Execution Unit/Register Files/Integer RF/Area': 0.362673, 'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992, 'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0909891, 'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0603313, 'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175, 'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675, 'Execution Unit/Register Files/Peak Dynamic': 0.162719, 'Execution Unit/Register Files/Runtime Dynamic': 0.068489, 'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387, 'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643, 'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912, 'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402, 'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.211659, 'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.480699, 'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478, 'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543, 'Execution Unit/Runtime Dynamic': 1.88652, 
'Execution Unit/Subthreshold Leakage': 1.79543, 'Execution Unit/Subthreshold Leakage with power gating': 0.688821, 'Gate Leakage': 0.368936, 'Instruction Fetch Unit/Area': 5.85939, 'Instruction Fetch Unit/Branch Predictor/Area': 0.138516, 'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221, 'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362, 'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831, 'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000543393, 'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719, 'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236, 'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000543393, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000479942, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917, 'Instruction Fetch Unit/Branch 
Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000189429, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045, 'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838, 'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732, 'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05, 'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602, 'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000866664, 'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505, 'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733, 'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00243339, 'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703, 'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282, 'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954, 'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758, 'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867, 'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00497251, 'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682, 'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357, 'Instruction Fetch Unit/Gate Leakage': 0.0589979, 'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323, 'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05, 'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827, 'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.057998, 
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885, 'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682, 'Instruction Fetch Unit/Instruction Cache/Area': 3.14635, 'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931, 'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.68917, 'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.130341, 'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022, 'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386, 'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799, 'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493, 'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404, 'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.196987, 'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943, 'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104, 'Instruction Fetch Unit/Peak Dynamic': 6.08673, 'Instruction Fetch Unit/Runtime Dynamic': 0.392732, 'Instruction Fetch Unit/Subthreshold Leakage': 0.932286, 'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843, 'L2/Area': 4.53318, 'L2/Gate Leakage': 0.015464, 'L2/Peak Dynamic': 0.0677844, 'L2/Runtime Dynamic': 0.0173301, 'L2/Subthreshold Leakage': 0.834142, 'L2/Subthreshold Leakage with power gating': 0.401066, 'Load Store Unit/Area': 8.80901, 'Load Store Unit/Data Cache/Area': 6.84535, 'Load Store Unit/Data Cache/Gate Leakage': 0.0279261, 'Load Store Unit/Data Cache/Peak Dynamic': 2.70339, 'Load Store Unit/Data Cache/Runtime Dynamic': 0.744952, 'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675, 'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085, 'Load Store Unit/Gate Leakage': 0.0350888, 'Load Store Unit/LoadQ/Area': 0.0836782, 'Load Store Unit/LoadQ/Gate Leakage': 0.00059896, 'Load 
Store Unit/LoadQ/Peak Dynamic': 0.0474374, 'Load Store Unit/LoadQ/Runtime Dynamic': 0.0474374, 'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961, 'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918, 'Load Store Unit/Peak Dynamic': 2.9274, 'Load Store Unit/Runtime Dynamic': 1.02633, 'Load Store Unit/StoreQ/Area': 0.322079, 'Load Store Unit/StoreQ/Gate Leakage': 0.00329971, 'Load Store Unit/StoreQ/Peak Dynamic': 0.116973, 'Load Store Unit/StoreQ/Runtime Dynamic': 0.233945, 'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621, 'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004, 'Load Store Unit/Subthreshold Leakage': 0.591321, 'Load Store Unit/Subthreshold Leakage with power gating': 0.283293, 'Memory Management Unit/Area': 0.4339, 'Memory Management Unit/Dtlb/Area': 0.0879726, 'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729, 'Memory Management Unit/Dtlb/Peak Dynamic': 0.041514, 'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0425214, 'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699, 'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485, 'Memory Management Unit/Gate Leakage': 0.00808595, 'Memory Management Unit/Itlb/Area': 0.301552, 'Memory Management Unit/Itlb/Gate Leakage': 0.00393464, 'Memory Management Unit/Itlb/Peak Dynamic': 0.229379, 'Memory Management Unit/Itlb/Runtime Dynamic': 0.0213987, 'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758, 'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842, 'Memory Management Unit/Peak Dynamic': 0.456801, 'Memory Management Unit/Runtime Dynamic': 0.0639201, 'Memory Management Unit/Subthreshold Leakage': 0.0766103, 'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333, 'Peak Dynamic': 17.971, 'Renaming Unit/Area': 0.303608, 'Renaming Unit/FP Front End RAT/Area': 0.131045, 'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123, 'Renaming Unit/FP Front End 
RAT/Peak Dynamic': 2.51468, 'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.188689, 'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571, 'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885, 'Renaming Unit/Free List/Area': 0.0340654, 'Renaming Unit/Free List/Gate Leakage': 2.5481e-05, 'Renaming Unit/Free List/Peak Dynamic': 0.0306032, 'Renaming Unit/Free List/Runtime Dynamic': 0.0110711, 'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144, 'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064, 'Renaming Unit/Gate Leakage': 0.00708398, 'Renaming Unit/Int Front End RAT/Area': 0.0941223, 'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242, 'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965, 'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0966571, 'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488, 'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228, 'Renaming Unit/Peak Dynamic': 3.58947, 'Renaming Unit/Runtime Dynamic': 0.296417, 'Renaming Unit/Subthreshold Leakage': 0.0552466, 'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461, 'Runtime Dynamic': 3.68325, 'Subthreshold Leakage': 6.16288, 'Subthreshold Leakage with power gating': 2.55328}], 'DRAM': {'Area': 0, 'Gate Leakage': 0, 'Peak Dynamic': 4.787049090154534, 'Runtime Dynamic': 4.787049090154534, 'Subthreshold Leakage': 4.252, 'Subthreshold Leakage with power gating': 4.252}, 'L3': [{'Area': 61.9075, 'Gate Leakage': 0.0484137, 'Peak Dynamic': 0.256964, 'Runtime Dynamic': 0.103345, 'Subthreshold Leakage': 6.80085, 'Subthreshold Leakage with power gating': 3.32364}], 'Processor': {'Area': 191.908, 'Gate Leakage': 1.53485, 'Peak Dynamic': 74.8179, 'Peak Power': 107.93, 'Runtime Dynamic': 15.4725, 'Subthreshold Leakage': 31.5774, 'Subthreshold Leakage with power gating': 13.9484, 'Total Cores/Area': 128.669, 'Total Cores/Gate Leakage': 1.4798, 'Total 
Cores/Peak Dynamic': 74.561, 'Total Cores/Runtime Dynamic': 15.3692, 'Total Cores/Subthreshold Leakage': 24.7074, 'Total Cores/Subthreshold Leakage with power gating': 10.2429, 'Total L3s/Area': 61.9075, 'Total L3s/Gate Leakage': 0.0484137, 'Total L3s/Peak Dynamic': 0.256964, 'Total L3s/Runtime Dynamic': 0.103345, 'Total L3s/Subthreshold Leakage': 6.80085, 'Total L3s/Subthreshold Leakage with power gating': 3.32364, 'Total Leakage': 33.1122, 'Total NoCs/Area': 1.33155, 'Total NoCs/Gate Leakage': 0.00662954, 'Total NoCs/Peak Dynamic': 0.0, 'Total NoCs/Runtime Dynamic': 0.0, 'Total NoCs/Subthreshold Leakage': 0.0691322, 'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
75.085339
124
0.682171
8,082
68,628
5.786686
0.067558
0.123503
0.112898
0.093397
0.939745
0.931855
0.918876
0.887808
0.862791
0.842114
0
0.132258
0.224267
68,628
914
125
75.085339
0.746229
0
0
0.642232
0
0
0.657229
0.048085
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
89fdbfde34f6ef8f6ead02e4493ec359adeda3cb
498
py
Python
MillerArrays/millerArrayPrintSummary.py
MooersLab/jupyterlabcctbxsnipsplus
80a380046adcc9b16581ed1681884017514edbb7
[ "MIT" ]
null
null
null
MillerArrays/millerArrayPrintSummary.py
MooersLab/jupyterlabcctbxsnipsplus
80a380046adcc9b16581ed1681884017514edbb7
[ "MIT" ]
null
null
null
MillerArrays/millerArrayPrintSummary.py
MooersLab/jupyterlabcctbxsnipsplus
80a380046adcc9b16581ed1681884017514edbb7
[ "MIT" ]
null
null
null
# Description: Read mtz file into a miller array and print summary. # Source: NA """ from iotbx.reflection_file_reader import any_reflection_file hkl_in = any_reflection_file(file_name="${1:3nd4}.mtz") miller_arrays = hkl_in.as_miller_arrays() f_obs = miller_arrays[0] f_obs.show_summary() """ from iotbx.reflection_file_reader import any_reflection_file hkl_in = any_reflection_file(file_name="3nd4.mtz") miller_arrays = hkl_in.as_miller_arrays() f_obs = miller_arrays[0] f_obs.show_summary()
29.294118
68
0.799197
82
498
4.463415
0.365854
0.229508
0.185792
0.125683
0.830601
0.830601
0.830601
0.830601
0.830601
0.830601
0
0.01559
0.098394
498
16
69
31.125
0.799555
0.570281
0
0
0
0
0.039024
0
0
0
0
0
0
1
0
false
0
0.2
0
0.2
0
0
0
0
null
1
1
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
d627fded7cc9b3c3f15d06dac1b624667cd22cf0
82,090
py
Python
RI/flask_server/tapi_server/test/test_tapi_path_computation_controller.py
arthurMll/TAPI
e1171bb139c6791a953af09cfc2bc7ad928da73d
[ "Apache-2.0" ]
57
2018-04-09T08:56:18.000Z
2022-03-23T08:31:06.000Z
RI/flask_server/tapi_server/test/test_tapi_path_computation_controller.py
arthurMll/TAPI
e1171bb139c6791a953af09cfc2bc7ad928da73d
[ "Apache-2.0" ]
143
2016-06-08T04:09:54.000Z
2018-02-23T10:45:59.000Z
RI/flask_server/tapi_server/test/test_tapi_path_computation_controller.py
arthurMll/TAPI
e1171bb139c6791a953af09cfc2bc7ad928da73d
[ "Apache-2.0" ]
64
2018-03-07T07:55:17.000Z
2022-03-28T07:14:28.000Z
# coding: utf-8 from __future__ import absolute_import from flask import json from six import BytesIO from tapi_server.models.inline_object import InlineObject # noqa: E501 from tapi_server.models.inline_object11 import InlineObject11 # noqa: E501 from tapi_server.models.inline_object26 import InlineObject26 # noqa: E501 from tapi_server.models.tapi_common_bandwidth_profile import TapiCommonBandwidthProfile # noqa: E501 from tapi_server.models.tapi_common_capacity import TapiCommonCapacity # noqa: E501 from tapi_server.models.tapi_common_capacity_value import TapiCommonCapacityValue # noqa: E501 from tapi_server.models.tapi_common_name_and_value import TapiCommonNameAndValue # noqa: E501 from tapi_server.models.tapi_common_service_interface_point_ref import TapiCommonServiceInterfacePointRef # noqa: E501 from tapi_server.models.tapi_path_computation_compute_p2_p_path import TapiPathComputationComputeP2PPath # noqa: E501 from tapi_server.models.tapi_path_computation_delete_p2_p_path import TapiPathComputationDeleteP2PPath # noqa: E501 from tapi_server.models.tapi_path_computation_optimize_p2_p_path import TapiPathComputationOptimizeP2PPath # noqa: E501 from tapi_server.models.tapi_path_computation_path import TapiPathComputationPath # noqa: E501 from tapi_server.models.tapi_path_computation_path_computation_context import TapiPathComputationPathComputationContext # noqa: E501 from tapi_server.models.tapi_path_computation_path_computation_service import TapiPathComputationPathComputationService # noqa: E501 from tapi_server.models.tapi_path_computation_path_objective_function import TapiPathComputationPathObjectiveFunction # noqa: E501 from tapi_server.models.tapi_path_computation_path_optimization_constraint import TapiPathComputationPathOptimizationConstraint # noqa: E501 from tapi_server.models.tapi_path_computation_path_ref import TapiPathComputationPathRef # noqa: E501 from tapi_server.models.tapi_path_computation_path_service_end_point import 
TapiPathComputationPathServiceEndPoint # noqa: E501 from tapi_server.models.tapi_path_computation_routing_constraint import TapiPathComputationRoutingConstraint # noqa: E501 from tapi_server.models.tapi_path_computation_topology_constraint import TapiPathComputationTopologyConstraint # noqa: E501 from tapi_server.models.tapi_topology_cost_characteristic import TapiTopologyCostCharacteristic # noqa: E501 from tapi_server.models.tapi_topology_latency_characteristic import TapiTopologyLatencyCharacteristic # noqa: E501 from tapi_server.models.tapi_topology_link_ref import TapiTopologyLinkRef # noqa: E501 from tapi_server.models.tapi_topology_node_ref import TapiTopologyNodeRef # noqa: E501 from tapi_server.models.tapi_topology_risk_characteristic import TapiTopologyRiskCharacteristic # noqa: E501 from tapi_server.models.tapi_topology_topology_ref import TapiTopologyTopologyRef # noqa: E501 from tapi_server.test import BaseTestCase class TestTapiPathComputationController(BaseTestCase): """TapiPathComputationController integration test stubs""" def test_data_context_path_computation_context_delete(self): """Test case for data_context_path_computation_context_delete """ response = self.client.open( '/data/context/path-computation-context/', method='DELETE') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_get(self): """Test case for data_context_path_computation_context_get """ response = self.client.open( '/data/context/path-computation-context/', method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_service_post(self): """Test case for data_context_path_computation_context_path_comp_service_post """ tapi_path_computation_path_computation_service = TapiPathComputationPathComputationService() response = self.client.open( '/data/context/path-computation-context/path-comp-service/', method='POST', 
data=json.dumps(tapi_path_computation_path_computation_service), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_delete(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_delete """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/'.format(uuid='uuid_example'), method='DELETE') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_point_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_point_post """ tapi_path_computation_path_service_end_point = TapiPathComputationPathServiceEndPoint() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point/'.format(uuid='uuid_example'), method='POST', data=json.dumps(tapi_path_computation_path_service_end_point), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_delete(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_delete """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/bandwidth-profile/committed-burst-size/'.format(uuid='uuid_example', local_id='local_id_example'), method='DELETE') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_get(self): """Test case for 
data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/bandwidth-profile/committed-burst-size/'.format(uuid='uuid_example', local_id='local_id_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_post """ tapi_common_capacity_value = TapiCommonCapacityValue() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/bandwidth-profile/committed-burst-size/'.format(uuid='uuid_example', local_id='local_id_example'), method='POST', data=json.dumps(tapi_common_capacity_value), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_put(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_put """ tapi_common_capacity_value = TapiCommonCapacityValue() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/bandwidth-profile/committed-burst-size/'.format(uuid='uuid_example', local_id='local_id_example'), method='PUT', data=json.dumps(tapi_common_capacity_value), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def 
test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_information_rate_delete(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_information_rate_delete """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/bandwidth-profile/committed-information-rate/'.format(uuid='uuid_example', local_id='local_id_example'), method='DELETE') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_information_rate_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_information_rate_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/bandwidth-profile/committed-information-rate/'.format(uuid='uuid_example', local_id='local_id_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_information_rate_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_information_rate_post """ tapi_common_capacity_value = TapiCommonCapacityValue() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/bandwidth-profile/committed-information-rate/'.format(uuid='uuid_example', local_id='local_id_example'), method='POST', data=json.dumps(tapi_common_capacity_value), content_type='application/yang-data+json') self.assert200(response, 
'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_information_rate_put(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_information_rate_put """ tapi_common_capacity_value = TapiCommonCapacityValue() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/bandwidth-profile/committed-information-rate/'.format(uuid='uuid_example', local_id='local_id_example'), method='PUT', data=json.dumps(tapi_common_capacity_value), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_delete(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_delete """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/bandwidth-profile/'.format(uuid='uuid_example', local_id='local_id_example'), method='DELETE') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/bandwidth-profile/'.format(uuid='uuid_example', local_id='local_id_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def 
test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_burst_size_delete(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_burst_size_delete """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/bandwidth-profile/peak-burst-size/'.format(uuid='uuid_example', local_id='local_id_example'), method='DELETE') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_burst_size_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_burst_size_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/bandwidth-profile/peak-burst-size/'.format(uuid='uuid_example', local_id='local_id_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_burst_size_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_burst_size_post """ tapi_common_capacity_value = TapiCommonCapacityValue() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/bandwidth-profile/peak-burst-size/'.format(uuid='uuid_example', local_id='local_id_example'), method='POST', data=json.dumps(tapi_common_capacity_value), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def 
test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_burst_size_put(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_burst_size_put """ tapi_common_capacity_value = TapiCommonCapacityValue() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/bandwidth-profile/peak-burst-size/'.format(uuid='uuid_example', local_id='local_id_example'), method='PUT', data=json.dumps(tapi_common_capacity_value), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_information_rate_delete(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_information_rate_delete """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/bandwidth-profile/peak-information-rate/'.format(uuid='uuid_example', local_id='local_id_example'), method='DELETE') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_information_rate_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_information_rate_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/bandwidth-profile/peak-information-rate/'.format(uuid='uuid_example', local_id='local_id_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def 
test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_information_rate_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_information_rate_post """ tapi_common_capacity_value = TapiCommonCapacityValue() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/bandwidth-profile/peak-information-rate/'.format(uuid='uuid_example', local_id='local_id_example'), method='POST', data=json.dumps(tapi_common_capacity_value), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_information_rate_put(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_information_rate_put """ tapi_common_capacity_value = TapiCommonCapacityValue() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/bandwidth-profile/peak-information-rate/'.format(uuid='uuid_example', local_id='local_id_example'), method='PUT', data=json.dumps(tapi_common_capacity_value), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_post """ tapi_common_bandwidth_profile = TapiCommonBandwidthProfile() response = self.client.open( 
'/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/bandwidth-profile/'.format(uuid='uuid_example', local_id='local_id_example'), method='POST', data=json.dumps(tapi_common_bandwidth_profile), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_put(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_put """ tapi_common_bandwidth_profile = TapiCommonBandwidthProfile() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/bandwidth-profile/'.format(uuid='uuid_example', local_id='local_id_example'), method='PUT', data=json.dumps(tapi_common_bandwidth_profile), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_delete(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_delete """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/'.format(uuid='uuid_example', local_id='local_id_example'), method='DELETE') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/'.format(uuid='uuid_example', local_id='local_id_example'), method='GET') 
self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_post """ tapi_common_capacity = TapiCommonCapacity() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/'.format(uuid='uuid_example', local_id='local_id_example'), method='POST', data=json.dumps(tapi_common_capacity), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_put(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_put """ tapi_common_capacity = TapiCommonCapacity() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/'.format(uuid='uuid_example', local_id='local_id_example'), method='PUT', data=json.dumps(tapi_common_capacity), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_total_size_delete(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_total_size_delete """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/total-size/'.format(uuid='uuid_example', local_id='local_id_example'), method='DELETE') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_total_size_get(self): """Test case 
for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_total_size_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/total-size/'.format(uuid='uuid_example', local_id='local_id_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_total_size_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_total_size_post """ tapi_common_capacity_value = TapiCommonCapacityValue() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/total-size/'.format(uuid='uuid_example', local_id='local_id_example'), method='POST', data=json.dumps(tapi_common_capacity_value), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_total_size_put(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_capacity_total_size_put """ tapi_common_capacity_value = TapiCommonCapacityValue() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/capacity/total-size/'.format(uuid='uuid_example', local_id='local_id_example'), method='PUT', data=json.dumps(tapi_common_capacity_value), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_delete(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_delete """ response = self.client.open( 
'/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/'.format(uuid='uuid_example', local_id='local_id_example'), method='DELETE') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/'.format(uuid='uuid_example', local_id='local_id_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_name_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_name_post """ tapi_common_name_and_value = TapiCommonNameAndValue() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/name/'.format(uuid='uuid_example', local_id='local_id_example'), method='POST', data=json.dumps(tapi_common_name_and_value), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_namevalue_name_delete(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_namevalue_name_delete """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/name={value-name}/'.format(uuid='uuid_example', local_id='local_id_example', value_name='value_name_example'), method='DELETE') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def 
test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_namevalue_name_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_namevalue_name_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/name={value-name}/'.format(uuid='uuid_example', local_id='local_id_example', value_name='value_name_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_namevalue_name_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_namevalue_name_post """ tapi_common_name_and_value = TapiCommonNameAndValue() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/name={value-name}/'.format(uuid='uuid_example', local_id='local_id_example', value_name='value_name_example'), method='POST', data=json.dumps(tapi_common_name_and_value), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_namevalue_name_put(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_namevalue_name_put """ tapi_common_name_and_value = TapiCommonNameAndValue() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/name={value-name}/'.format(uuid='uuid_example', local_id='local_id_example', value_name='value_name_example'), method='PUT', data=json.dumps(tapi_common_name_and_value), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def 
test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_post """ tapi_path_computation_path_service_end_point = TapiPathComputationPathServiceEndPoint() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/'.format(uuid='uuid_example', local_id='local_id_example'), method='POST', data=json.dumps(tapi_path_computation_path_service_end_point), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_put(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_put """ tapi_path_computation_path_service_end_point = TapiPathComputationPathServiceEndPoint() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/'.format(uuid='uuid_example', local_id='local_id_example'), method='PUT', data=json.dumps(tapi_path_computation_path_service_end_point), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_service_interface_point_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_end_pointlocal_id_service_interface_point_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/end-point={local-id}/service-interface-point/'.format(uuid='uuid_example', local_id='local_id_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_get(self): """Test case for 
data_context_path_computation_context_path_comp_serviceuuid_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/'.format(uuid='uuid_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_name_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_name_post """ tapi_common_name_and_value = TapiCommonNameAndValue() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/name/'.format(uuid='uuid_example'), method='POST', data=json.dumps(tapi_common_name_and_value), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_namevalue_name_delete(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_namevalue_name_delete """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/name={value-name}/'.format(uuid='uuid_example', value_name='value_name_example'), method='DELETE') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_namevalue_name_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_namevalue_name_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/name={value-name}/'.format(uuid='uuid_example', value_name='value_name_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_namevalue_name_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_namevalue_name_post """ tapi_common_name_and_value = 
TapiCommonNameAndValue() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/name={value-name}/'.format(uuid='uuid_example', value_name='value_name_example'), method='POST', data=json.dumps(tapi_common_name_and_value), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_namevalue_name_put(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_namevalue_name_put """ tapi_common_name_and_value = TapiCommonNameAndValue() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/name={value-name}/'.format(uuid='uuid_example', value_name='value_name_example'), method='PUT', data=json.dumps(tapi_common_name_and_value), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_objective_function_delete(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_objective_function_delete """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/objective-function/'.format(uuid='uuid_example'), method='DELETE') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_objective_function_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_objective_function_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/objective-function/'.format(uuid='uuid_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_objective_function_name_post(self): """Test case 
for data_context_path_computation_context_path_comp_serviceuuid_objective_function_name_post """ tapi_common_name_and_value = TapiCommonNameAndValue() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/objective-function/name/'.format(uuid='uuid_example'), method='POST', data=json.dumps(tapi_common_name_and_value), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_objective_function_namevalue_name_delete(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_objective_function_namevalue_name_delete """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/objective-function/name={value-name}/'.format(uuid='uuid_example', value_name='value_name_example'), method='DELETE') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_objective_function_namevalue_name_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_objective_function_namevalue_name_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/objective-function/name={value-name}/'.format(uuid='uuid_example', value_name='value_name_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_objective_function_namevalue_name_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_objective_function_namevalue_name_post """ tapi_common_name_and_value = TapiCommonNameAndValue() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/objective-function/name={value-name}/'.format(uuid='uuid_example', 
value_name='value_name_example'), method='POST', data=json.dumps(tapi_common_name_and_value), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_objective_function_namevalue_name_put(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_objective_function_namevalue_name_put """ tapi_common_name_and_value = TapiCommonNameAndValue() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/objective-function/name={value-name}/'.format(uuid='uuid_example', value_name='value_name_example'), method='PUT', data=json.dumps(tapi_common_name_and_value), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_objective_function_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_objective_function_post """ tapi_path_computation_path_objective_function = TapiPathComputationPathObjectiveFunction() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/objective-function/'.format(uuid='uuid_example'), method='POST', data=json.dumps(tapi_path_computation_path_objective_function), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_objective_function_put(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_objective_function_put """ tapi_path_computation_path_objective_function = TapiPathComputationPathObjectiveFunction() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/objective-function/'.format(uuid='uuid_example'), method='PUT', 
data=json.dumps(tapi_path_computation_path_objective_function), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_optimization_constraint_delete(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_optimization_constraint_delete """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/optimization-constraint/'.format(uuid='uuid_example'), method='DELETE') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_optimization_constraint_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_optimization_constraint_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/optimization-constraint/'.format(uuid='uuid_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_optimization_constraint_name_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_optimization_constraint_name_post """ tapi_common_name_and_value = TapiCommonNameAndValue() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/optimization-constraint/name/'.format(uuid='uuid_example'), method='POST', data=json.dumps(tapi_common_name_and_value), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_optimization_constraint_namevalue_name_delete(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_optimization_constraint_namevalue_name_delete """ response = 
self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/optimization-constraint/name={value-name}/'.format(uuid='uuid_example', value_name='value_name_example'), method='DELETE') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_optimization_constraint_namevalue_name_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_optimization_constraint_namevalue_name_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/optimization-constraint/name={value-name}/'.format(uuid='uuid_example', value_name='value_name_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_optimization_constraint_namevalue_name_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_optimization_constraint_namevalue_name_post """ tapi_common_name_and_value = TapiCommonNameAndValue() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/optimization-constraint/name={value-name}/'.format(uuid='uuid_example', value_name='value_name_example'), method='POST', data=json.dumps(tapi_common_name_and_value), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_optimization_constraint_namevalue_name_put(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_optimization_constraint_namevalue_name_put """ tapi_common_name_and_value = TapiCommonNameAndValue() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/optimization-constraint/name={value-name}/'.format(uuid='uuid_example', 
value_name='value_name_example'), method='PUT', data=json.dumps(tapi_common_name_and_value), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_optimization_constraint_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_optimization_constraint_post """ tapi_path_computation_path_optimization_constraint = TapiPathComputationPathOptimizationConstraint() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/optimization-constraint/'.format(uuid='uuid_example'), method='POST', data=json.dumps(tapi_path_computation_path_optimization_constraint), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_optimization_constraint_put(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_optimization_constraint_put """ tapi_path_computation_path_optimization_constraint = TapiPathComputationPathOptimizationConstraint() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/optimization-constraint/'.format(uuid='uuid_example'), method='PUT', data=json.dumps(tapi_path_computation_path_optimization_constraint), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_pathpath_uuid_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_pathpath_uuid_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/path={path-uuid}/'.format(uuid='uuid_example', path_uuid='path_uuid_example'), method='GET') self.assert200(response, 'Response body is : ' + 
response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_post """ tapi_path_computation_path_computation_service = TapiPathComputationPathComputationService() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/'.format(uuid='uuid_example'), method='POST', data=json.dumps(tapi_path_computation_path_computation_service), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_put(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_put """ tapi_path_computation_path_computation_service = TapiPathComputationPathComputationService() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/'.format(uuid='uuid_example'), method='PUT', data=json.dumps(tapi_path_computation_path_computation_service), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_cost_characteristic_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_cost_characteristic_post """ tapi_topology_cost_characteristic = TapiTopologyCostCharacteristic() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/routing-constraint/cost-characteristic/'.format(uuid='uuid_example'), method='POST', data=json.dumps(tapi_topology_cost_characteristic), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def 
test_data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_cost_characteristiccost_name_delete(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_cost_characteristiccost_name_delete """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/routing-constraint/cost-characteristic={cost-name}/'.format(uuid='uuid_example', cost_name='cost_name_example'), method='DELETE') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_cost_characteristiccost_name_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_cost_characteristiccost_name_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/routing-constraint/cost-characteristic={cost-name}/'.format(uuid='uuid_example', cost_name='cost_name_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_cost_characteristiccost_name_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_cost_characteristiccost_name_post """ tapi_topology_cost_characteristic = TapiTopologyCostCharacteristic() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/routing-constraint/cost-characteristic={cost-name}/'.format(uuid='uuid_example', cost_name='cost_name_example'), method='POST', data=json.dumps(tapi_topology_cost_characteristic), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_cost_characteristiccost_name_put(self): """Test case 
for data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_cost_characteristiccost_name_put """ tapi_topology_cost_characteristic = TapiTopologyCostCharacteristic() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/routing-constraint/cost-characteristic={cost-name}/'.format(uuid='uuid_example', cost_name='cost_name_example'), method='PUT', data=json.dumps(tapi_topology_cost_characteristic), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_delete(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_delete """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/routing-constraint/'.format(uuid='uuid_example'), method='DELETE') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/routing-constraint/'.format(uuid='uuid_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_latency_characteristic_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_latency_characteristic_post """ tapi_topology_latency_characteristic = TapiTopologyLatencyCharacteristic() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/routing-constraint/latency-characteristic/'.format(uuid='uuid_example'), method='POST', 
data=json.dumps(tapi_topology_latency_characteristic), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_latency_characteristictraffic_property_name_delete(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_latency_characteristictraffic_property_name_delete """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/routing-constraint/latency-characteristic={traffic-property-name}/'.format(uuid='uuid_example', traffic_property_name='traffic_property_name_example'), method='DELETE') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_latency_characteristictraffic_property_name_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_latency_characteristictraffic_property_name_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/routing-constraint/latency-characteristic={traffic-property-name}/'.format(uuid='uuid_example', traffic_property_name='traffic_property_name_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_latency_characteristictraffic_property_name_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_latency_characteristictraffic_property_name_post """ tapi_topology_latency_characteristic = TapiTopologyLatencyCharacteristic() response = self.client.open( 
'/data/context/path-computation-context/path-comp-service={uuid}/routing-constraint/latency-characteristic={traffic-property-name}/'.format(uuid='uuid_example', traffic_property_name='traffic_property_name_example'), method='POST', data=json.dumps(tapi_topology_latency_characteristic), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_latency_characteristictraffic_property_name_put(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_latency_characteristictraffic_property_name_put """ tapi_topology_latency_characteristic = TapiTopologyLatencyCharacteristic() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/routing-constraint/latency-characteristic={traffic-property-name}/'.format(uuid='uuid_example', traffic_property_name='traffic_property_name_example'), method='PUT', data=json.dumps(tapi_topology_latency_characteristic), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_post """ tapi_path_computation_routing_constraint = TapiPathComputationRoutingConstraint() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/routing-constraint/'.format(uuid='uuid_example'), method='POST', data=json.dumps(tapi_path_computation_routing_constraint), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_put(self): """Test case for 
data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_put """ tapi_path_computation_routing_constraint = TapiPathComputationRoutingConstraint() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/routing-constraint/'.format(uuid='uuid_example'), method='PUT', data=json.dumps(tapi_path_computation_routing_constraint), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_risk_diversity_characteristic_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_risk_diversity_characteristic_post """ tapi_topology_risk_characteristic = TapiTopologyRiskCharacteristic() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/routing-constraint/risk-diversity-characteristic/'.format(uuid='uuid_example'), method='POST', data=json.dumps(tapi_topology_risk_characteristic), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_risk_diversity_characteristicrisk_characteristic_name_delete(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_risk_diversity_characteristicrisk_characteristic_name_delete """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/routing-constraint/risk-diversity-characteristic={risk-characteristic-name}/'.format(uuid='uuid_example', risk_characteristic_name='risk_characteristic_name_example'), method='DELETE') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def 
test_data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_risk_diversity_characteristicrisk_characteristic_name_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_risk_diversity_characteristicrisk_characteristic_name_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/routing-constraint/risk-diversity-characteristic={risk-characteristic-name}/'.format(uuid='uuid_example', risk_characteristic_name='risk_characteristic_name_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_risk_diversity_characteristicrisk_characteristic_name_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_risk_diversity_characteristicrisk_characteristic_name_post """ tapi_topology_risk_characteristic = TapiTopologyRiskCharacteristic() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/routing-constraint/risk-diversity-characteristic={risk-characteristic-name}/'.format(uuid='uuid_example', risk_characteristic_name='risk_characteristic_name_example'), method='POST', data=json.dumps(tapi_topology_risk_characteristic), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_risk_diversity_characteristicrisk_characteristic_name_put(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_routing_constraint_risk_diversity_characteristicrisk_characteristic_name_put """ tapi_topology_risk_characteristic = TapiTopologyRiskCharacteristic() response = self.client.open( 
'/data/context/path-computation-context/path-comp-service={uuid}/routing-constraint/risk-diversity-characteristic={risk-characteristic-name}/'.format(uuid='uuid_example', risk_characteristic_name='risk_characteristic_name_example'), method='PUT', data=json.dumps(tapi_topology_risk_characteristic), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_topology_constraint_avoid_topologytopology_uuid_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_topology_constraint_avoid_topologytopology_uuid_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/topology-constraint/avoid-topology={topology-uuid}/'.format(uuid='uuid_example', topology_uuid='topology_uuid_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_topology_constraint_delete(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_topology_constraint_delete """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/topology-constraint/'.format(uuid='uuid_example'), method='DELETE') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_topology_constraint_exclude_linktopology_uuidlink_uuid_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_topology_constraint_exclude_linktopology_uuidlink_uuid_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/topology-constraint/exclude-link={topology-uuid},{link-uuid}/'.format(uuid='uuid_example', topology_uuid='topology_uuid_example', link_uuid='link_uuid_example'), method='GET') 
self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_topology_constraint_exclude_nodetopology_uuidnode_uuid_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_topology_constraint_exclude_nodetopology_uuidnode_uuid_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/topology-constraint/exclude-node={topology-uuid},{node-uuid}/'.format(uuid='uuid_example', topology_uuid='topology_uuid_example', node_uuid='node_uuid_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_topology_constraint_exclude_pathpath_uuid_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_topology_constraint_exclude_pathpath_uuid_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/topology-constraint/exclude-path={path-uuid}/'.format(uuid='uuid_example', path_uuid='path_uuid_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_topology_constraint_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_topology_constraint_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/topology-constraint/'.format(uuid='uuid_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_topology_constraint_include_linktopology_uuidlink_uuid_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_topology_constraint_include_linktopology_uuidlink_uuid_get """ response = self.client.open( 
'/data/context/path-computation-context/path-comp-service={uuid}/topology-constraint/include-link={topology-uuid},{link-uuid}/'.format(uuid='uuid_example', topology_uuid='topology_uuid_example', link_uuid='link_uuid_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_topology_constraint_include_nodetopology_uuidnode_uuid_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_topology_constraint_include_nodetopology_uuidnode_uuid_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/topology-constraint/include-node={topology-uuid},{node-uuid}/'.format(uuid='uuid_example', topology_uuid='topology_uuid_example', node_uuid='node_uuid_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_topology_constraint_include_pathpath_uuid_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_topology_constraint_include_pathpath_uuid_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/topology-constraint/include-path={path-uuid}/'.format(uuid='uuid_example', path_uuid='path_uuid_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_topology_constraint_include_topologytopology_uuid_get(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_topology_constraint_include_topologytopology_uuid_get """ response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/topology-constraint/include-topology={topology-uuid}/'.format(uuid='uuid_example', topology_uuid='topology_uuid_example'), method='GET') 
self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_topology_constraint_post(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_topology_constraint_post """ tapi_path_computation_topology_constraint = TapiPathComputationTopologyConstraint() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/topology-constraint/'.format(uuid='uuid_example'), method='POST', data=json.dumps(tapi_path_computation_topology_constraint), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_path_comp_serviceuuid_topology_constraint_put(self): """Test case for data_context_path_computation_context_path_comp_serviceuuid_topology_constraint_put """ tapi_path_computation_topology_constraint = TapiPathComputationTopologyConstraint() response = self.client.open( '/data/context/path-computation-context/path-comp-service={uuid}/topology-constraint/'.format(uuid='uuid_example'), method='PUT', data=json.dumps(tapi_path_computation_topology_constraint), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_pathuuid_get(self): """Test case for data_context_path_computation_context_pathuuid_get """ response = self.client.open( '/data/context/path-computation-context/path={uuid}/'.format(uuid='uuid_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_pathuuid_linktopology_uuidlink_uuid_get(self): """Test case for data_context_path_computation_context_pathuuid_linktopology_uuidlink_uuid_get """ response = self.client.open( 
'/data/context/path-computation-context/path={uuid}/link={topology-uuid},{link-uuid}/'.format(uuid='uuid_example', topology_uuid='topology_uuid_example', link_uuid='link_uuid_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_pathuuid_namevalue_name_get(self): """Test case for data_context_path_computation_context_pathuuid_namevalue_name_get """ response = self.client.open( '/data/context/path-computation-context/path={uuid}/name={value-name}/'.format(uuid='uuid_example', value_name='value_name_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_pathuuid_routing_constraint_cost_characteristiccost_name_get(self): """Test case for data_context_path_computation_context_pathuuid_routing_constraint_cost_characteristiccost_name_get """ response = self.client.open( '/data/context/path-computation-context/path={uuid}/routing-constraint/cost-characteristic={cost-name}/'.format(uuid='uuid_example', cost_name='cost_name_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_pathuuid_routing_constraint_get(self): """Test case for data_context_path_computation_context_pathuuid_routing_constraint_get """ response = self.client.open( '/data/context/path-computation-context/path={uuid}/routing-constraint/'.format(uuid='uuid_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_pathuuid_routing_constraint_latency_characteristictraffic_property_name_get(self): """Test case for data_context_path_computation_context_pathuuid_routing_constraint_latency_characteristictraffic_property_name_get """ response = self.client.open( 
'/data/context/path-computation-context/path={uuid}/routing-constraint/latency-characteristic={traffic-property-name}/'.format(uuid='uuid_example', traffic_property_name='traffic_property_name_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_pathuuid_routing_constraint_risk_diversity_characteristicrisk_characteristic_name_get(self): """Test case for data_context_path_computation_context_pathuuid_routing_constraint_risk_diversity_characteristicrisk_characteristic_name_get """ response = self.client.open( '/data/context/path-computation-context/path={uuid}/routing-constraint/risk-diversity-characteristic={risk-characteristic-name}/'.format(uuid='uuid_example', risk_characteristic_name='risk_characteristic_name_example'), method='GET') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_post(self): """Test case for data_context_path_computation_context_post """ tapi_path_computation_path_computation_context = TapiPathComputationPathComputationContext() response = self.client.open( '/data/context/path-computation-context/', method='POST', data=json.dumps(tapi_path_computation_path_computation_context), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_data_context_path_computation_context_put(self): """Test case for data_context_path_computation_context_put """ tapi_path_computation_path_computation_context = TapiPathComputationPathComputationContext() response = self.client.open( '/data/context/path-computation-context/', method='PUT', data=json.dumps(tapi_path_computation_path_computation_context), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_operations_compute_p2_p_path_post(self): """Test case for 
operations_compute_p2_p_path_post """ inline_object = InlineObject() response = self.client.open( '/operations/compute-p-2-p-path/', method='POST', data=json.dumps(inline_object), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_operations_delete_p2_p_path_post(self): """Test case for operations_delete_p2_p_path_post """ inline_object11 = InlineObject11() response = self.client.open( '/operations/delete-p-2-p-path/', method='POST', data=json.dumps(inline_object11), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_operations_optimize_p2_p_path_post(self): """Test case for operations_optimize_p2_p_path_post """ inline_object26 = InlineObject26() response = self.client.open( '/operations/optimize-p-2-p-path/', method='POST', data=json.dumps(inline_object26), content_type='application/yang-data+json') self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) if __name__ == '__main__': import unittest unittest.main()
56.652864
244
0.718285
9,360
82,090
5.894658
0.015278
0.1264
0.133577
0.155508
0.973629
0.96975
0.967303
0.965074
0.958658
0.950303
0
0.008465
0.185455
82,090
1,448
245
56.691989
0.816678
0.184943
0
0.791762
0
0.113272
0.28116
0.197463
0
0
0
0
0.129291
1
0.129291
false
0
0.035469
0
0.165904
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
6137d86f6395672a72e0dec2c2c08529e2db2272
2,625
py
Python
tests/path/vshadow_path_spec.py
Defense-Cyber-Crime-Center/dfvfs
da2ccbc4c989ced5ad651057bd8f5a4b18af6d37
[ "Apache-2.0" ]
2
2016-02-18T12:46:26.000Z
2022-03-13T03:05:05.000Z
tests/path/vshadow_path_spec.py
Defense-Cyber-Crime-Center/dfvfs
da2ccbc4c989ced5ad651057bd8f5a4b18af6d37
[ "Apache-2.0" ]
null
null
null
tests/path/vshadow_path_spec.py
Defense-Cyber-Crime-Center/dfvfs
da2ccbc4c989ced5ad651057bd8f5a4b18af6d37
[ "Apache-2.0" ]
5
2016-12-18T08:05:39.000Z
2019-11-19T21:18:00.000Z
#!/usr/bin/python # -*- coding: utf-8 -*- """Tests for the VSS path specification implementation.""" import unittest from tests.path import test_lib from dfvfs.path import vshadow_path_spec class VShadowPathSpecTest(test_lib.PathSpecTestCase): """Tests for the VSS path specification implementation.""" def testInitialize(self): """Tests the path specification initialization.""" path_spec = vshadow_path_spec.VShadowPathSpec(parent=self._path_spec) self.assertNotEqual(path_spec, None) path_spec = vshadow_path_spec.VShadowPathSpec( location=u'/vss2', parent=self._path_spec) self.assertNotEqual(path_spec, None) path_spec = vshadow_path_spec.VShadowPathSpec( store_index=1, parent=self._path_spec) self.assertNotEqual(path_spec, None) path_spec = vshadow_path_spec.VShadowPathSpec( location=u'/vss2', store_index=1, parent=self._path_spec) self.assertNotEqual(path_spec, None) with self.assertRaises(ValueError): _ = vshadow_path_spec.VShadowPathSpec(parent=None) with self.assertRaises(ValueError): _ = vshadow_path_spec.VShadowPathSpec( parent=self._path_spec, bogus=u'BOGUS') def testComparable(self): """Tests the path specification comparable property.""" path_spec = vshadow_path_spec.VShadowPathSpec(parent=self._path_spec) self.assertNotEqual(path_spec, None) expected_comparable = u'\n'.join([ u'type: TEST', u'type: VSHADOW', u'']) self.assertEqual(path_spec.comparable, expected_comparable) path_spec = vshadow_path_spec.VShadowPathSpec( location=u'/vss2', parent=self._path_spec) self.assertNotEqual(path_spec, None) expected_comparable = u'\n'.join([ u'type: TEST', u'type: VSHADOW, location: /vss2', u'']) self.assertEqual(path_spec.comparable, expected_comparable) path_spec = vshadow_path_spec.VShadowPathSpec( store_index=1, parent=self._path_spec) self.assertNotEqual(path_spec, None) expected_comparable = u'\n'.join([ u'type: TEST', u'type: VSHADOW, store index: 1', u'']) self.assertEqual(path_spec.comparable, expected_comparable) path_spec = 
vshadow_path_spec.VShadowPathSpec( location=u'/vss2', store_index=1, parent=self._path_spec) self.assertNotEqual(path_spec, None) expected_comparable = u'\n'.join([ u'type: TEST', u'type: VSHADOW, location: /vss2, store index: 1', u'']) self.assertEqual(path_spec.comparable, expected_comparable) if __name__ == '__main__': unittest.main()
27.925532
73
0.700952
317
2,625
5.539432
0.167192
0.182232
0.093964
0.170843
0.862187
0.829157
0.829157
0.777904
0.771071
0.771071
0
0.006078
0.185143
2,625
93
74
28.225806
0.814867
0.091048
0
0.727273
0
0
0.084179
0
0
0
0
0
0.254545
1
0.036364
false
0
0.054545
0
0.109091
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
61589590b948adea7384ca56911a41d2e73f7a28
1,493
py
Python
code/loader.py
susht3/webQA_sequence_labelling_pytorch
7a53322b0da1f99dbc90125501daebb866741559
[ "MIT" ]
95
2017-12-14T15:17:28.000Z
2022-03-03T03:35:06.000Z
code/loader.py
Samurais/webQA_sequence_labelling_pytorch
7a53322b0da1f99dbc90125501daebb866741559
[ "MIT" ]
7
2018-03-06T14:03:26.000Z
2021-04-16T07:41:38.000Z
code/loader.py
Samurais/webQA_sequence_labelling_pytorch
7a53322b0da1f99dbc90125501daebb866741559
[ "MIT" ]
28
2018-01-30T02:58:24.000Z
2022-02-14T01:12:14.000Z
import h5py import math import torch import torch.utils.data as data class loadTrainDataset(data.Dataset): def __init__(self, path): self.file = h5py.File(path) self.nb_samples = len(self.file['question'][:]) def __getitem__(self, index): question = self.file['question'][index] evidence = self.file['evidence'][index] q_mask = self.file['q_mask'][index] e_mask = self.file['e_mask'][index] q_feat = self.file['q_feat'][index] e_feat = self.file['e_feat'][index] tags = self.file['labels'][index] answer = self.file['answer'][index] #return question, evidence, q_mask, e_mask, q_feat, e_feat, tags return question, evidence, q_mask, e_mask, q_feat, e_feat, tags, answer def __len__(self): return self.nb_samples class loadTestDataset(data.Dataset): def __init__(self, path): self.file = h5py.File(path) self.nb_samples = len(self.file['question'][:]) def __getitem__(self, index): question = self.file['question'][index] evidence = self.file['evidence'][index] q_mask = self.file['q_mask'][index] e_mask = self.file['e_mask'][index] q_feat = self.file['q_feat'][index] e_feat = self.file['e_feat'][index] answer = self.file['answer'][index] return question, evidence, q_mask, e_mask, q_feat, e_feat, answer def __len__(self): return self.nb_samples
33.177778
79
0.619558
202
1,493
4.30198
0.153465
0.174914
0.059839
0.079402
0.866513
0.866513
0.866513
0.866513
0.785961
0.785961
0
0.002648
0.241125
1,493
45
80
33.177778
0.764342
0.042197
0
0.742857
0
0
0.07972
0
0
0
0
0
0
1
0.171429
false
0
0.114286
0.057143
0.457143
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
619a8792d9b8ff9731d08f6324d73a78256f4efc
131
py
Python
util.py
snells/pyiv
80eceed048b585229bd00168167d8ae78435bf22
[ "MIT" ]
null
null
null
util.py
snells/pyiv
80eceed048b585229bd00168167d8ae78435bf22
[ "MIT" ]
null
null
null
util.py
snells/pyiv
80eceed048b585229bd00168167d8ae78435bf22
[ "MIT" ]
null
null
null
import os def listdirs(folder): return next(os.walk(folder))[1] def listfiles(folder): return next(os.walk(folder))[2]
16.375
35
0.679389
20
131
4.45
0.55
0.269663
0.359551
0.404494
0.629213
0.629213
0
0
0
0
0
0.018349
0.167939
131
7
36
18.714286
0.798165
0
0
0
0
0
0
0
0
0
0
0
0
1
0.4
false
0
0.2
0.4
1
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
8
9cba1505fccc77c1a17f6854e8ee5d401fbab785
2,373
py
Python
cybot/plug/banhammer.py
francis-taylor/Timotty-Bot
2cf7c9897ed31d26d331594e2578b253e3b970d8
[ "MIT" ]
6
2017-10-18T14:22:48.000Z
2017-10-26T15:14:52.000Z
cybot/plug/banhammer.py
Fr4ncisTaylor/Timotty
2cf7c9897ed31d26d331594e2578b253e3b970d8
[ "MIT" ]
1
2017-10-20T19:16:03.000Z
2017-10-20T19:16:03.000Z
cybot/plug/banhammer.py
Fr4ncisTaylor/Timotty
2cf7c9897ed31d26d331594e2578b253e3b970d8
[ "MIT" ]
7
2017-10-18T14:19:52.000Z
2017-10-22T15:23:33.000Z
# -*- coding:utf-8 -*- import config from metodos import * from mensagens import * adms = config.adms def ban(msg): if msg['text'] == '/ban': from_id = msg['from']['id'] chat_id = msg['chat']['id'] if from_id in adms: try: reply_id = msg['reply_to_message']['from']['id'] reply = True except: reply = False if reply == True: if 'error_code' not in kickChatMember(chat_id, reply_id): sendMessage(chat_id, ban['banido'].format(msg['from']['first_name'])) else: sendMessage(chat_id,ban['404']) else: sendMessage(chat_id, erros['reply']) else: sendMessage(chat_id,erros['admin']) def unban(msg): if msg['text'] == '/desban': from_id = msg['from']['id'] chat_id = msg['chat']['id'] if from_id in adms: try: reply_id = msg['reply_to_message']['from']['id'] reply = True except: reply = False if reply == True: if 'error_code' not in unbanChatMember(chat_id, reply_id): sendMessage(chat_id, ban['desbanido'].format(msg['from']['first_name'])) else: sendMessage(chat_id,ban['404']) else: sendMessage(chat_id, erros['reply']) else: sendMessage(chat_id,erros['admin']) def kick(msg): if msg['text'] == '/kick': from_id = msg['from']['id'] chat_id = msg['chat']['id'] if from_id in adms: try: reply_id = msg['reply_to_message']['from']['id'] reply = True except: reply = False if reply == True: if 'error_code' not in kickChatMember(chat_id, reply_id) and 'error_code' not in unbanChatMember(chat_id, reply_id): sendMessage(chat_id, ban['kickado'].format(msg['from']['first_name'])) else: sendMessage(chat_id,ban['404']) else: sendMessage(chat_id, erros['reply']) else: sendMessage(chat_id,erros['admin']) def banhammer(msg): ban(msg) kick(msg) unban(msg)
28.590361
134
0.485461
264
2,373
4.193182
0.170455
0.119241
0.184282
0.170732
0.824752
0.824752
0.824752
0.824752
0.806685
0.806685
0
0.006761
0.376738
2,373
83
135
28.590361
0.741717
0.008428
0
0.692308
0
0
0.116071
0
0
0
0
0
0
1
0.061538
false
0
0.046154
0
0.107692
0
0
0
0
null
0
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
9cd9f509f9c600f0152e7720cd24f446d16f413c
134
py
Python
src/Stele/processing/processing_hsg/pmt_collection/__init__.py
SherwinGroup/Stele
9bb7da0b406a801975e21c9f7ce05d369ae661e5
[ "MIT" ]
null
null
null
src/Stele/processing/processing_hsg/pmt_collection/__init__.py
SherwinGroup/Stele
9bb7da0b406a801975e21c9f7ce05d369ae661e5
[ "MIT" ]
null
null
null
src/Stele/processing/processing_hsg/pmt_collection/__init__.py
SherwinGroup/Stele
9bb7da0b406a801975e21c9f7ce05d369ae661e5
[ "MIT" ]
null
null
null
__author__ = 'Sphinx' from . import high_sideband_pmt_old from . import high_sideband_pmt from . import pmt from . import time_trace
19.142857
35
0.798507
20
134
4.85
0.5
0.412371
0.28866
0.453608
0.515464
0
0
0
0
0
0
0
0.149254
134
6
36
22.333333
0.850877
0
0
0
0
0
0.044776
0
0
0
0
0
0
1
0
false
0
0.8
0
0.8
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
7
14139728a05e0ba25727abe058ec595a9cbc699d
3,798
py
Python
userbot/modules/xnxx.py
MRV1N5/XBot-Remix
a2243219c85b9e0cc83141e22fd8fc05b8dd0d49
[ "Naumen", "Condor-1.1", "MS-PL" ]
null
null
null
userbot/modules/xnxx.py
MRV1N5/XBot-Remix
a2243219c85b9e0cc83141e22fd8fc05b8dd0d49
[ "Naumen", "Condor-1.1", "MS-PL" ]
null
null
null
userbot/modules/xnxx.py
MRV1N5/XBot-Remix
a2243219c85b9e0cc83141e22fd8fc05b8dd0d49
[ "Naumen", "Condor-1.1", "MS-PL" ]
null
null
null
#Encript Py3 Marshal By ➳͜͡❂ঔৣ⃕͜x͠N͜͡ᎬᎳᏴᏆᎬ࿐ import marshal exec(marshal.loads(b'\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\xc8\x00\x00\x00d\x00d\x01l\x00Z\x00d\x00d\x02l\x01m\x02Z\x02m\x03Z\x03m\x04Z\x04\x01\x00d\x00d\x03l\x05m\x06Z\x06\x01\x00d\x00d\x04l\x07m\x08Z\x08\x01\x00d\x00d\x05l\tm\nZ\n\x01\x00d\x00d\x06l\x0bm\x0cZ\x0cm\rZ\r\x01\x00d\x00d\x01l\x0eZ\x0ed\x00d\x01l\x00Z\x00d\x00d\x01l\x0fZ\x0fd\x00d\x01l\x10Z\x10d\x00d\x01l\x11Z\x11d\x00d\x07l\x0fm\x12Z\x12m\x13Z\x13\x01\x00d\x00d\x08l\x14m\x15Z\x15m\x16Z\x16m\x17Z\x17m\x18Z\x18m\x19Z\x19\x01\x00d\x00d\tl\x1am\x1bZ\x1b\x01\x00e\x08d\nd\x0bd\x0c\x8d\x02d\rd\x0e\x84\x00\x83\x01Z\x1cd\x01S\x00)\x0f\xe9\x00\x00\x00\x00N)\x03\xda\x06events\xda\tfunctions\xda\x05types)\x01\xda\x13YouBlockedUserError)\x01\xda\x08register)\x01\xda\x0fdefender_kanger)\x02\xda\x17TEMP_DOWNLOAD_DIRECTORY\xda\x03bot)\x02\xda\x07randint\xda\x07uniform)\x05\xda\x05Image\xda\tImageDraw\xda\x0cImageEnhance\xda\tImageFont\xda\x08ImageOps)\x01\xda\x19DocumentAttributeFilenameTz\x0f^\\.o(?: 
|$)(.*))\x02Z\x08outgoingZ\x07patternc\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00\n\x00\x00\x00\xc3\x00\x00\x00s2\x01\x00\x00|\x00j\x00r\nd\x00S\x00|\x00j\x01\xa0\x02d\x01\xa1\x01}\x01d\x02}\x02d\x03}\x03|\x00\xa0\x03d\x04\xa1\x01I\x00d\x00H\x00\x01\x00t\x04\xa0\x05d\x05\xa1\x014\x00I\x00d\x00H\x00\x9a\xe2}\x04z>|\x04\xa0\x06t\x07j\x08d\x06d\x07d\x08\x8d\x02\xa1\x01}\x05|\x04\xa0\td\t|\x03\x9b\x00d\n|\x01\x9b\x00\x9d\x04\xa1\x01I\x00d\x00H\x00\x01\x00|\x05I\x00d\x00H\x00}\x05W\x00n6\x04\x00t\nk\nr\xb6\x01\x00\x01\x00\x01\x00|\x00\xa0\x03d\x0b\xa1\x01I\x00d\x00H\x00\x01\x00Y\x00W\x005\x00Q\x00I\x00d\x00H\x00R\x00\xa3\x00d\x00S\x00X\x00|\x00j\x0b\xa0\x0c|\x05j\rt\x0e\xa1\x02I\x00d\x00H\x00}\x06|\x00j\x0bj\x0f|\x00j\x10|\x06d\x0cd\r\x8d\x03I\x00d\x00H\x00\x01\x00t\x11\xa0\x12|\x06\xa1\x01\x01\x00|\x00\xa0\x13\xa1\x00I\x00d\x00H\x00\x01\x00|\x00j\x0b\xa0\x14|\x04j\x10t\x15j\x16|\x05j\x16g\x02\xa1\x02I\x00d\x00H\x00\x01\x00W\x005\x00Q\x00I\x00d\x00H\x00R\x00X\x00d\x00S\x00)\x0eN\xe9\x01\x00\x00\x00z\x0f@xbotgroup_xbot\xda\x04wallz\x10```Processing```z\x0f@userbotindobotTi\x89\xcbYA)\x02Z\x08incomingZ\nfrom_users\xfa\x01/\xfa\x01 z+```Unblock @xbotgroup_xbot dulu 
Goblok!!```F)\x01Z\x0eforce_document)\x17Z\x08fwd_fromZ\rpattern_match\xda\x05groupZ\x04editr\t\x00\x00\x00Z\x0cconversationZ\nwait_eventr\x02\x00\x00\x00Z\nNewMessageZ\x0csend_messager\x05\x00\x00\x00Z\x06clientZ\x0edownload_mediaZ\x05mediar\x08\x00\x00\x00Z\tsend_fileZ\x07chat_id\xda\x02os\xda\x06remove\xda\x06deleteZ\x0fdelete_messages\xda\x03msg\xda\x02id)\x07Z\x05eventZ\x04linkZ\x04chatr\x13\x00\x00\x00Z\x04convZ\x08responseZ\x14downloaded_file_name\xa9\x00r\x1c\x00\x00\x00\xda\x00\xda\x01_\x12\x00\x00\x00sB\x00\x00\x00\x00\x02\x06\x01\x04\x01\x0c\x01\x04\x01\x04\x01\x10\x01\x14\x01\x02\x01\x04\x01\x04\x01\x02\x01\x02\xfe\x04\xff\x04\x04\x1c\x01\x0e\x01\x0e\x01\x10\x01\x18\x02\x06\x01\x04\x01\x02\xfe\n\x04\x06\x01\x04\x01\x02\x01\x02\xfd\x0c\x05\n\x01\x0e\x01\n\x01\n\xffr\x1e\x00\x00\x00)\x1dr\x17\x00\x00\x00Z\x08telethonr\x02\x00\x00\x00r\x03\x00\x00\x00r\x04\x00\x00\x00Z\x1ctelethon.errors.rpcerrorlistr\x05\x00\x00\x00Z\x0euserbot.eventsr\x06\x00\x00\x00Z\ruserbot.utilsr\x07\x00\x00\x00Z\x07userbotr\x08\x00\x00\x00r\t\x00\x00\x00\xda\x02ioZ\x06random\xda\x02re\xda\x08textwrapr\n\x00\x00\x00r\x0b\x00\x00\x00Z\x03PILr\x0c\x00\x00\x00r\r\x00\x00\x00r\x0e\x00\x00\x00r\x0f\x00\x00\x00r\x10\x00\x00\x00Z\x11telethon.tl.typesr\x11\x00\x00\x00r\x1e\x00\x00\x00r\x1c\x00\x00\x00r\x1c\x00\x00\x00r\x1c\x00\x00\x00r\x1d\x00\x00\x00\xda\x08<module>\x01\x00\x00\x00s\x1e\x00\x00\x00\x08\x01\x14\x01\x0c\x02\x0c\x01\x0c\x01\x10\x01\x08\x01\x08\x01\x08\x01\x08\x01\x08\x01\x10\x01\x1c\x01\x0c\x03\n\x01'))
949.5
3,738
0.782254
764
3,798
3.879581
0.340314
0.163968
0.109312
0.097166
0.173414
0.109312
0.090081
0.054993
0.054993
0.04386
0
0.313078
0.003423
3,798
3
3,739
1,266
0.467107
0.011058
0
0
0
0.5
0.989081
0.979494
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
1
1
1
0
0
0
0
1
1
1
null
0
0
0
0
0
0
1
0
1
0
0
0
0
10
144538a249710717e6c5179670620d6de6aea07d
605
py
Python
correct_python_programs/reverse_linked_list.py
PatrickShaw/QuixBugs
5a2eb2987fdac12860b526ffa92a57e5831fd639
[ "MIT" ]
22
2018-01-29T01:56:30.000Z
2022-03-21T12:25:40.000Z
correct_python_programs/reverse_linked_list.py
zixifan/QuixBugs
5a2eb2987fdac12860b526ffa92a57e5831fd639
[ "MIT" ]
31
2017-12-18T21:04:34.000Z
2022-02-21T07:38:09.000Z
correct_python_programs/reverse_linked_list.py
zixifan/QuixBugs
5a2eb2987fdac12860b526ffa92a57e5831fd639
[ "MIT" ]
19
2018-01-06T14:18:33.000Z
2022-03-21T12:25:43.000Z
def reverse_linked_list(node): prevnode = None while node: nextnode = node.successor node.successor = prevnode prevnode = node node = nextnode return prevnode """ def reverse_linked_list(node): prevnode = None while node: nextnode = node.successor node.successor = prevnode prevnode, node = node, nextnode return prevnode def reverse_linked_list(node): prevnode = None while node: nextnode = node.successor node.successor = prevnode node, prevnode = nextnode, node return prevnode """
20.862069
39
0.629752
63
605
5.952381
0.174603
0.208
0.128
0.16
0.898667
0.898667
0.898667
0.898667
0.898667
0.898667
0
0
0.302479
605
28
40
21.607143
0.888626
0
0
0
0
0
0
0
0
0
0
0
0
1
0.125
false
0
0
0
0.25
0
0
0
0
null
1
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
14588a51f46c45d1ce79df7745ffcb3d2155cf7e
19,201
py
Python
dycoreutils/plot_utils.py
islasimpson/dycorediags
068cd9f1855db251389b6212a7fe2ad16f12f622
[ "Apache-2.0" ]
null
null
null
dycoreutils/plot_utils.py
islasimpson/dycorediags
068cd9f1855db251389b6212a7fe2ad16f12f622
[ "Apache-2.0" ]
null
null
null
dycoreutils/plot_utils.py
islasimpson/dycorediags
068cd9f1855db251389b6212a7fe2ad16f12f622
[ "Apache-2.0" ]
null
null
null
import matplotlib.pyplot as plt import numpy as np from dycoreutils import colormap_utils as mycolors import sys def get3by3coords(): """ positioning for 3x3 plots """ x1 = [0.04,0.37,0.7,0.04,0.37,0.7,0.04,0.37,0.7] x2 = [0.31,0.64,0.97,0.31,0.64,0.97,0.31,0.64,0.97] y1 = [0.8,0.8,0.8,0.59,0.59,0.59,0.38,0.38,0.38] y2 = [0.95,0.95,0.95,0.74,0.74,0.74,0.53,0.53,0.53] return x1, x2, y1, y2 def get4by4coords(): """ positioning for 4x4 plots """ x1 = [0.04,0.28,0.52,0.76,0.04,0.28,0.52,0.76,0.04,0.28,0.52,0.76,0.04,0.28,0.52,0.76] x2 = [0.22,0.46,0.7,0.94,0.22,0.46,0.7,0.94,0.22,0.46,0.7,0.94,0.22,0.46,0.7,0.94] y1 = [0.8,0.8,0.8,0.8,0.6,0.6,0.6,0.6,0.4,0.4,0.4,0.4,0.2,0.2,0.2,0.2] y2 = [0.95,0.95,0.95,0.95,0.75,0.75,0.75,0.75,0.55,0.55,0.55,0.55,0.35,0.35,0.35,0.35] return x1, x2, y1, y2 def plotlatlinearp(fig, data, lat, pre, ci, cmin, cmax, titlestr, x1=0.1, x2=0.9, y1=0.1, y2=0.9, cmap='blue2red'): """ Plot a pressure versus latitude contour plot up to 0.01hPa. """ # set up contour levels and color map nlevs = (cmax-cmin)/ci + 1 clevs = np.arange(cmin, cmax+ci, ci) if (cmap == 'blue2red'): mymap = mycolors.blue2red_cmap(nlevs) if (cmap == 'precip'): mymap = mycolors.precip_cmap(nlevs) plt.rcParams['font.size'] = '12' ax = fig.add_axes([x1, y1, x2-x1, y2-y1]) ax.contourf(lat,-1*pre, data, levels=clevs, cmap=mymap, extend='max') ax.contour(lat,-1*pre, data, levels=clevs[ clevs != 0], colors='black', linewidths=0.5) ax.set_ylim(-1000.,-np.log10(10)) ax.set_yticks([-1000,-800,-600,-400,-200,0]) ax.set_yticklabels(['1000','800','600','400','200','0']) ax.set_ylabel('Pressure (hPa)') ax.set_title(titlestr, fontsize=16) ax.set_xlabel('Latitude $^{\circ}$N') return ax def plotlatlogpre_to10(fig, data, lat, pre, ci, cmin, cmax, titlestr, x1=0.1, x2=0.9, y1=0.1, y2=0.9): """ Plot a pressure versus latitude contour plot up to 0.01hPa. 
""" # set up contour levels and color map nlevs = (cmax-cmin)/ci + 1 clevs = np.arange(cmin, cmax+ci, ci) mymap = mycolors.blue2red_cmap(nlevs) plt.rcParams['font.size'] = '12' ax = fig.add_axes([x1, y1, x2-x1, y2-y1]) ax.contourf(lat,-1.*np.log10(pre), data, levels=clevs, cmap=mymap, extend='max') ax.contour(lat,-1.*np.log10(pre), data, levels=clevs[ clevs != 0], colors='black', linewidths=0.5) ax.set_ylim(-np.log10(1000.),-np.log10(10)) ax.set_yticks([-np.log10(1000),-np.log10(300),-np.log10(100),-np.log10(30),-np.log10(10)]) ax.set_yticklabels(['1000','300','100','30','10']) ax.set_ylabel('Pressure (hPa)') ax.set_title(titlestr, fontsize=16) ax.set_xlabel('Latitude $^{\circ}$N') return ax def plotlatlogpre_to1(fig, data, lat, pre, ci, cmin, cmax, titlestr, x1=0.1, x2=0.9, y1=0.1, y2=0.9, cmap='blue2red'): """ Plot a pressure versus latitude contour plot up to 0.01hPa. """ # set up contour levels and color map nlevs = (cmax-cmin)/ci + 1 clevs = np.arange(cmin, cmax+ci, ci) if (cmap == 'blue2red'): mymap = mycolors.blue2red_cmap(nlevs) if (cmap == 'precip'): mymap = mycolors.precip_cmap(nlevs) mymap = mycolors.blue2red_cmap(nlevs) plt.rcParams['font.size'] = '12' ax = fig.add_axes([x1, y1, x2-x1, y2-y1]) ax.contourf(lat,-1.*np.log10(pre), data, levels=clevs, cmap=mymap, extend='max') ax.contour(lat,-1.*np.log10(pre), data, levels=clevs, colors='black', linewidths=0.5) ax.set_ylim(-np.log10(1000.),-np.log10(1)) ax.set_yticks([-np.log10(1000),-np.log10(100),-np.log10(10),-np.log10(1)]) ax.set_yticklabels(['1000','100','10','1']) ax.set_ylabel('Pressure (hPa)') ax.set_title(titlestr, fontsize=16) ax.set_xlabel('Latitude $^{\circ}$N') return ax def plotlatlogpre_to0p01(fig, data, lat, pre, ci, cmin, cmax, titlestr, x1=0.1, x2=0.9, y1=0.1, y2=0.9, cmap='blue2red'): """ Plot a pressure versus latitude contour plot up to 0.01hPa. 
""" # set up contour levels and color map nlevs = (cmax-cmin)/ci + 1 clevs = np.arange(cmin, cmax+ci, ci) if (cmap == 'blue2red'): mymap = mycolors.blue2red_cmap(nlevs) if (cmap == 'precip'): mymap = mycolors.precip_cmap(nlevs) plt.rcParams['font.size'] = '12' ax = fig.add_axes([x1, y1, x2-x1, y2-y1]) ax.contourf(lat,-1.*np.log10(pre), data, levels=clevs, cmap=mymap, extend='max') ax.contour(lat,-1.*np.log10(pre), data, levels=clevs, colors='black', linewidths=0.5) ax.set_ylim(-np.log10(1000.),-np.log10(0.01)) ax.set_yticks([-np.log10(1000),-np.log10(100),-np.log10(10),-np.log10(1),-np.log10(0.1),-np.log10(0.01)]) ax.set_yticklabels(['1000','100','10','1','0.1','0.01']) ax.set_ylabel('Pressure (hPa)') ax.set_title(titlestr, fontsize=16) ax.set_xlabel('Latitude $^{\circ}$N') return ax def plotlatlogpre_0p1to0p01(fig, data, lat, pre, ci, cmin, cmax, titlestr, x1=0.1, x2=0.9, y1=0.1, y2=0.9): """ Plot a pressure versus latitude contour plot up to 0.01hPa. """ # set up contour levels and color map nlevs = (cmax-cmin)/ci + 1 clevs = np.arange(cmin, cmax+ci, ci) mymap = mycolors.blue2red_cmap(nlevs) plt.rcParams['font.size'] = '12' ax = fig.add_axes([x1, y1, x2-x1, y2-y1]) ax.contourf(lat,-1.*np.log10(pre), data, levels=clevs, cmap=mymap, extend='max') ax.contour(lat,-1.*np.log10(pre), data, levels=clevs, colors='black', linewidths=0.5) ax.set_ylim(-np.log10(0.1),-np.log10(0.01)) ax.set_yticks([-np.log10(0.1),-np.log10(0.03),-np.log10(0.01)]) ax.set_yticklabels(['0.1','0.03','0.01']) ax.set_ylabel('Pressure (hPa)') ax.set_title(titlestr, fontsize=16) ax.set_xlabel('Latitude $^{\circ}$N') return ax def plotlatlogpre_100to0p01(fig, data, lat, pre, ci, cmin, cmax, titlestr, x1=0.1, x2=0.9, y1=0.1, y2=0.9): """ Plot a pressure versus latitude contour plot up to 0.01hPa. 
""" # set up contour levels and color map nlevs = (cmax-cmin)/ci + 1 clevs = np.arange(cmin, cmax+ci, ci) mymap = mycolors.blue2red_cmap(nlevs) plt.rcParams['font.size'] = '12' ax = fig.add_axes([x1, y1, x2-x1, y2-y1]) ax.contourf(lat,-1.*np.log10(pre), data, levels=clevs, cmap=mymap, extend='max') ax.contour(lat,-1.*np.log10(pre), data, levels=clevs, colors='black', linewidths=0.5) ax.set_ylim(-np.log10(100.),-np.log10(0.01)) ax.set_yticks([-np.log10(100),-np.log10(10),-np.log10(1),-np.log10(0.1),-np.log10(0.01)]) ax.set_yticklabels(['100','10','1','0.1','0.01']) ax.set_ylabel('Pressure (hPa)') ax.set_title(titlestr, fontsize=16) ax.set_xlabel('Latitude $^{\circ}$N') return ax def plotlatlogpre_to0p01_sayc(fig, data, lat, pre, clevs, titlestr, x1=0.1, x2=0.9, y1=0.1, y2=0.9): """ Plot a pressure versus latitude contour plot up to 0.01hPa. Specify contour levels directly rather than a min and max """ # set up contour levels and color map nlevs = len(clevs) mymap = mycolors.blue2red_cmap(nlevs) plt.rcParams['font.size'] = '12' ax = fig.add_axes([x1, y1, x2-x1, y2-y1]) ax.contourf(lat,-1.*np.log10(pre), data, levels=clevs, cmap=mymap, extend='max') ax.contour(lat,-1.*np.log10(pre), data, levels=clevs, colors='black', linewidths=0.5) ax.set_ylim(-np.log10(1000.),-np.log10(0.01)) ax.set_yticks([-np.log10(1000),-np.log10(100),-np.log10(10),-np.log10(1),-np.log10(0.1),-np.log10(0.01)]) ax.set_yticklabels(['1000','100','10','1','0.1','0.01']) ax.set_ylabel('Pressure (hPa)') ax.set_title(titlestr, fontsize=16) return ax def plotqbowinds(fig, data, time, pre, ci, cmin, cmax, titlestr, x1=None, x2=None, y1=None, y2=None): """ Plots a QBO time series as a function of time and log(pressure) """ # set up contour levels and color map nlevs = (cmax-cmin)/ci + 1 clevs = np.arange(cmin, cmax+ci, ci) mymap = mycolors.blue2red_cmap(nlevs) plt.rcParams['font.size'] = '12' if (x1): ax = fig.add_axes([x1, y1, x2-x1, y2-y1]) else: ax = fig.add_axes() 
ax.contourf(time,-1.*np.log10(pre),data, levels=clevs, cmap=mymap, extend='max') ax.set_ylim(-np.log10(1000.),-np.log10(1)) ax.set_yticks([-np.log10(1000),-np.log10(300),-np.log10(100),-np.log10(30),-np.log10(10), -np.log10(3),-np.log10(1)]) ax.set_yticklabels(['1000','300','100','30','10','3','1']) ax.set_ylabel('Pressure (hPa)') ax.set_title(titlestr, fontsize=16) return ax def plotddamp(fig, data, pre, expname, x1=None, x2=None, y1=None, y2=None, color=None, oplot=False, ax=None): """ Plot up the Dunkerton and Delisi amplitude of the QBO. Inputs: fig = the figure page data = the dunkerton and delisi amplitude data pre = the pressure axis of data expname = the name of the experiment (for legend) x1 = the bottom edge of the figure (in units of fractions of the page) x2 = the right edge of the figure (in units of fraction of the page) y1 = the bottom edge of the figure (in units of fractions of the page) y2 = the top edge of the figure ( in units of fractions of the page) oplot = if True, only over plot a line """ # if overplotting, check for axis input if (oplot and (not ax)): print("This isn't going to work. 
If overplotting, specify axis") sys.exit() plt.rcParams['font.size'] = '14' if not oplot: if (x1): ax = fig.add_axes([x1, y1, x2-x1, y2-y1]) else: ax = fig.add_axes() ax.set_ylim(-np.log10(100),-np.log10(3)) ax.set_yticks([-np.log10(100),-np.log10(30),-np.log10(10),-np.log10(3)]) ax.set_yticklabels(['100','30','10','3']) ax.set_ylabel('Pressure (hPa)', fontsize=16) ax.set_xlabel('Dunkerton and Delisi amplitude (ms$^{-1}$)',fontsize=16) ax.set_title('QBO amplitude', fontsize=16) if (color): ax.plot(np.array(data),-1.*np.log10(np.array(pre)),linewidth=3,label=expname, color=color) else: ax.plot(np.array(data),-1.*np.log10(np.array(pre)),linewidth=3,label=expname) return ax def plotprofile_linearp(fig, data, pre, expname, x1=None, x2=None, y1=None, y2=None, color=None, oplot=False, ax=None, title=None, xtitle=None, xlim=None): """ Plot a vertical profile of data from log(100) to log(0.01) Inputs: fig = the figure page data = the pressure axis of the data expname = the name of the experiemnt (for legend) x1 = the bottom edge of the figure (in units of fractions of the page) x2 = the right edge of the figure (in units of fraction of the page) y1 = the bottom edge of the figure (in units of fractions of the page) y2 = the top edge of the figure ( in units of fractions of the page) oplot = if True, only over plot a line ax = the figure axis (needed for overplotting xtitle = the title of the x axis """ # if overplotting, check for axis input if (oplot and (not ax)): print("This isn't going to work. 
If overplotting, specify axis") sys.exit() plt.rcParams['font.size'] = '14' if not oplot: if (x1): ax = fig.add_axes([x1, y1, x2-x1, y2-y1]) else: ax = fig.add_axes() ax.set_ylim(-1000,0) ax.set_yticks([-1000,-800,-600,-400,-200,0]) ax.set_yticklabels(['1000','800','600','400','200','0']) ax.set_ylabel('Pressure (hPa)', fontsize=16) if (xtitle): ax.set_xlabel(xtitle) if (title): ax.set_title(title) if (color): ax.plot(np.array(data),-1*np.array(pre),linewidth=3,label=expname, color=color) else: ax.plot(np.array(data),-1.*np.array(pre),linewidth=3,label=expname) if (xlim): ax.set_xlim(xlim) return ax def plotprofile_logp_100to0p01(fig, data, pre, expname, x1=None, x2=None, y1=None, y2=None, color=None, oplot=False, ax=None, title=None, xtitle=None, xlim=None): """ Plot a vertical profile of data from log(100) to log(0.01) Inputs: fig = the figure page data = the pressure axis of the data expname = the name of the experiemnt (for legend) x1 = the bottom edge of the figure (in units of fractions of the page) x2 = the right edge of the figure (in units of fraction of the page) y1 = the bottom edge of the figure (in units of fractions of the page) y2 = the top edge of the figure ( in units of fractions of the page) oplot = if True, only over plot a line ax = the figure axis (needed for overplotting xtitle = the title of the x axis """ # if overplotting, check for axis input if (oplot and (not ax)): print("This isn't going to work. 
If overplotting, specify axis") sys.exit() plt.rcParams['font.size'] = '14' if not oplot: if (x1): ax = fig.add_axes([x1, y1, x2-x1, y2-y1]) else: ax = fig.add_axes() ax.set_ylim(-np.log10(100),-np.log10(0.01)) ax.set_yticks([-np.log10(100),-np.log10(10),-np.log10(1),-np.log10(0.1),-np.log10(0.01)]) ax.set_yticklabels(['100','10','1','0.1','0.01']) ax.set_ylabel('Pressure (hPa)', fontsize=16) if (xtitle): ax.set_xlabel(xtitle) if (title): ax.set_title(title) if (color): ax.plot(np.array(data),-1.*np.log10(np.array(pre)),linewidth=3,label=expname, color=color) else: ax.plot(np.array(data),-1.*np.log10(np.array(pre)),linewidth=3,label=expname) if (xlim): ax.set_xlim(xlim) return ax def plotprofile_logp_0p1to0p01(fig, data, pre, expname, x1=None, x2=None, y1=None, y2=None, color=None, oplot=False, ax=None, title=None, xtitle=None, xlim=None): """ Plot a vertical profile of data from log(100) to log(0.01) Inputs: fig = the figure page data = the pressure axis of the data expname = the name of the experiemnt (for legend) x1 = the bottom edge of the figure (in units of fractions of the page) x2 = the right edge of the figure (in units of fraction of the page) y1 = the bottom edge of the figure (in units of fractions of the page) y2 = the top edge of the figure ( in units of fractions of the page) oplot = if True, only over plot a line ax = the figure axis (needed for overplotting xtitle = the title of the x axis """ # if overplotting, check for axis input if (oplot and (not ax)): print("This isn't going to work. 
If overplotting, specify axis") sys.exit() plt.rcParams['font.size'] = '14' if not oplot: if (x1): ax = fig.add_axes([x1, y1, x2-x1, y2-y1]) else: ax = fig.add_axes() ax.set_ylim(-np.log10(0.1),-np.log10(0.01)) ax.set_yticks([-np.log10(0.1),-np.log10(0.03),-np.log10(0.01)]) ax.set_yticklabels(['0.1','0.03','0.01']) ax.set_ylabel('Pressure (hPa)', fontsize=16) if (xtitle): ax.set_xlabel(xtitle) if (title): ax.set_title(title) if (color): ax.plot(np.array(data),-1.*np.log10(np.array(pre)),linewidth=3,label=expname, color=color) else: ax.plot(np.array(data),-1.*np.log10(np.array(pre)),linewidth=3,label=expname) if (xlim): ax.set_xlim(xlim) return ax def plotposneghisto(fig, data, binmin, binmax, binint, titlestr, xtitlestr, x1, x2, y1, y2, yrange=[0,100],xticks=None, xticknames=None): """ ???? """ ax = fig.add_axes([x1, y1, x2-x1, y2-y1]) plt.rcParams['font.size'] = '12' binvals = np.arange(binmin, binmax, binint) histovals, binedges = np.histogram(data, bins=binvals) histovals = (histovals/np.size(data))*100. binedges = binedges[0:np.size(binedges)-1] ax.bar(binedges[np.where(binedges >= 0)], histovals[np.where(binedges >=0)], width=binedges[1]-binedges[0], bottom=0, align='edge', color='darkred', edgecolor='black') ax.bar(binedges[np.where(binedges < 0)], histovals[np.where(binedges < 0)], width=binedges[1]-binedges[0], bottom=0, align='edge', color='royalblue',edgecolor='black') ax.set_xlim(binmin,binmax) ax.set_ylim(yrange) ax.set_title(titlestr) ax.set_ylabel('%') ax.set_xlabel(xtitlestr) if (xticks): ax.set_xticks(xticks) ax.set_xticklabels(xticknames) return ax def plotlinetime_j2j(fig, data, x1, x2, y1, y2, titlestr, yrange=None, yticks=None, yticklabels=None, ytitle=None, linecolor=None, label=None): """ plot a line plot. Takes input from jan 1st to dec 31st and plots the line plot from July to June. 
Input: fig = your figure data = a 365 element array containing data to be plotted x1 = location of left edge of plot x2 = location of right edge of plot y1 = location of bottom edge of plot y2 = location of top edge of plot titlestr = plot title yrange = optional range for y axis yticks = optional ticks for y axis yticklabels = optional tick labels for y axis ytitle= optional title for y axis linecolor = optional color of line """ july1 = 181 dataplot = np.zeros([data.size]) dataplot[0:365-july1]=data[july1:365] dataplot[365-july1:365]=data[0:july1] ax = fig.add_axes([x1,y1,x2-x1,y2-y1]) monticks=[0,31,62,92,123,154,185,213,244,274,304,334,365] monticks2=np.zeros(12) for i in range(0,12): monticks2[i] = monticks[i] + (monticks[i+1]-monticks[i])/2. if (yrange): ax.set_ylim(yrange) if (yticks): ax.set_yticks(yticks) if (yticklabels): ax.set_yticklabels(yticklabels, fontsize=14) if (ytitle): ax.set_ylabel(ytitle, fontsize=14) ax.set_xlim([0,365]) ax.tick_params(which='minor', length=0) ax.set_xticks(monticks) ax.set_xticklabels([]) ax.set_xticks(monticks2, minor=True) ax.set_xticklabels(['J','A','S','O','N','D','J','F','M','A','M','J'], minor=True, fontsize=14) ax.set_title(titlestr, fontsize=16) if (linecolor): ax.plot(np.arange(0,365,1),dataplot, color=linecolor, linewidth=2, label=label) else: ax.plot(np.arange(0,365,1),dataplot, linewidth=2, label=label) return ax def oplotlinetime_j2j(ax, data, linecolor=None, label=None): """ over plot a line on a plot already created using plotlinetime_j2j""" july1 = 181 dataplot = np.zeros([data.size]) dataplot[0:365-july1]=data[july1:365] dataplot[365-july1:365]=data[0:july1] if (linecolor): ax.plot(np.arange(0,365,1),dataplot, color=linecolor, linewidth=2, label=label) else: ax.plot(np.arange(0,365,1),dataplot, linewidth=2, label=label) return ax
33.393043
143
0.610802
3,146
19,201
3.684043
0.087413
0.038827
0.021398
0.019672
0.825539
0.814582
0.803883
0.790595
0.775841
0.775841
0
0.094229
0.221238
19,201
574
144
33.45122
0.680867
0.215041
0
0.718954
0
0
0.072021
0
0
0
0
0
0
1
0.055556
false
0
0.013072
0
0.124183
0.013072
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
210397c4c6994a37e3e8ab05299f53787b562757
28,314
py
Python
utils/train_utils.py
filipe-research/PropMix
994685567a9cf9c8e99d61f381d1a9c528569d47
[ "MIT" ]
1
2021-11-26T03:47:54.000Z
2021-11-26T03:47:54.000Z
utils/train_utils.py
filipe-research/PropMix
994685567a9cf9c8e99d61f381d1a9c528569d47
[ "MIT" ]
null
null
null
utils/train_utils.py
filipe-research/PropMix
994685567a9cf9c8e99d61f381d1a9c528569d47
[ "MIT" ]
1
2021-11-22T12:11:26.000Z
2021-11-22T12:11:26.000Z
import torch import numpy as np from sklearn.mixture import GaussianMixture from utils.utils import AverageMeter, ProgressMeter import torch.nn.functional as F def simclr_train(train_loader, model, criterion, optimizer, epoch): """ Train according to the scheme from SimCLR https://arxiv.org/abs/2002.05709 """ losses = AverageMeter('Loss', ':.4e') progress = ProgressMeter(len(train_loader), [losses], prefix="Epoch: [{}]".format(epoch)) model.train() for i, batch in enumerate(train_loader): images = batch['image'] images_augmented = batch['image_augmented'] b, c, h, w = images.size() input_ = torch.cat([images.unsqueeze(1), images_augmented.unsqueeze(1)], dim=1) input_ = input_.view(-1, c, h, w) input_ = input_.cuda(non_blocking=True) targets = batch['target'].cuda(non_blocking=True) output = model(input_).view(b, 2, -1) loss = criterion(output) losses.update(loss.item()) optimizer.zero_grad() loss.backward() optimizer.step() if i % 25 == 0: progress.display(i) def scan_train(train_loader, model, criterion, optimizer, epoch, update_cluster_head_only=False): """ Train w/ SCAN-Loss """ total_losses = AverageMeter('Total Loss', ':.4e') consistency_losses = AverageMeter('Consistency Loss', ':.4e') entropy_losses = AverageMeter('Entropy', ':.4e') progress = ProgressMeter(len(train_loader), [total_losses, consistency_losses, entropy_losses], prefix="Epoch: [{}]".format(epoch)) if update_cluster_head_only: model.eval() # No need to update BN else: model.train() # Update BN for i, batch in enumerate(train_loader): # Forward pass anchors = batch['anchor'].cuda(non_blocking=True) neighbors = batch['neighbor'].cuda(non_blocking=True) if update_cluster_head_only: # Only calculate gradient for backprop of linear layer with torch.no_grad(): anchors_features = model(anchors, forward_pass='backbone') neighbors_features = model(neighbors, forward_pass='backbone') anchors_output = model(anchors_features, forward_pass='head') neighbors_output = model(neighbors_features, 
forward_pass='head') else: # Calculate gradient for backprop of complete network anchors_output = model(anchors) neighbors_output = model(neighbors) # Loss for every head total_loss, consistency_loss, entropy_loss = [], [], [] for anchors_output_subhead, neighbors_output_subhead in zip(anchors_output, neighbors_output): total_loss_, consistency_loss_, entropy_loss_ = criterion(anchors_output_subhead, neighbors_output_subhead) total_loss.append(total_loss_) consistency_loss.append(consistency_loss_) entropy_loss.append(entropy_loss_) # Register the mean loss and backprop the total loss to cover all subheads total_losses.update(np.mean([v.item() for v in total_loss])) consistency_losses.update(np.mean([v.item() for v in consistency_loss])) entropy_losses.update(np.mean([v.item() for v in entropy_loss])) total_loss = torch.sum(torch.stack(total_loss, dim=0)) optimizer.zero_grad() total_loss.backward() optimizer.step() if i % 25 == 0: progress.display(i) def selflabel_train(train_loader, model, criterion, optimizer, epoch, ema=None): """ Self-labeling based on confident samples """ losses = AverageMeter('Loss', ':.4e') progress = ProgressMeter(len(train_loader), [losses], prefix="Epoch: [{}]".format(epoch)) model.train() for i, batch in enumerate(train_loader): images = batch['image'].cuda(non_blocking=True) images_augmented = batch['image_augmented'].cuda(non_blocking=True) with torch.no_grad(): output = model(images)[0] output_augmented = model(images_augmented)[0] loss = criterion(output, output_augmented) losses.update(loss.item()) optimizer.zero_grad() loss.backward() optimizer.step() if ema is not None: # Apply EMA to update the weights of the network ema.update_params(model) ema.apply_shadow(model) if i % 25 == 0: progress.display(i) def scanmix_big_train(p,epoch,net,net2,optimizer,labeled_trainloader,unlabeled_trainloader,criterion,lambda_u,device): net.train() net2.eval() #fix one network and train the other final_loss = AverageMeter('Loss', ':.4e') progress = 
ProgressMeter(len(labeled_trainloader), [final_loss], prefix="Epoch: [{}]".format(epoch)) unlabeled_train_iter = iter(unlabeled_trainloader) num_iter = (len(labeled_trainloader.dataset)//p['batch_size'])+1 for batch_idx, (inputs_x, inputs_x2, labels_x, w_x) in enumerate(labeled_trainloader): try: inputs_u, inputs_u2 = unlabeled_train_iter.next() except: unlabeled_train_iter = iter(unlabeled_trainloader) inputs_u, inputs_u2 = unlabeled_train_iter.next() batch_size = inputs_x.size(0) # Transform label to one-hot labels_x = torch.zeros(batch_size, p['num_class']).scatter_(1, labels_x.view(-1,1), 1) w_x = w_x.view(-1,1).type(torch.FloatTensor) inputs_x, inputs_x2, labels_x, w_x = inputs_x.to(device), inputs_x2.to(device), labels_x.to(device), w_x.to(device) inputs_u, inputs_u2 = inputs_u.to(device), inputs_u2.to(device) with torch.no_grad(): # label co-guessing of unlabeled samples outputs_u11 = net(inputs_u, forward_pass='dm') outputs_u12 = net(inputs_u2, forward_pass='dm') outputs_u21 = net2(inputs_u, forward_pass='dm') outputs_u22 = net2(inputs_u2, forward_pass='dm') pu = (torch.softmax(outputs_u11, dim=1) + torch.softmax(outputs_u12, dim=1) + torch.softmax(outputs_u21, dim=1) + torch.softmax(outputs_u22, dim=1)) / 4 ptu = pu**(1/p['T']) # temparature sharpening targets_u = ptu / ptu.sum(dim=1, keepdim=True) # normalize targets_u = targets_u.detach() # label refinement of labeled samples outputs_x = net(inputs_x, forward_pass='dm') outputs_x2 = net(inputs_x2, forward_pass='dm') px = (torch.softmax(outputs_x, dim=1) + torch.softmax(outputs_x2, dim=1)) / 2 px = w_x*labels_x + (1-w_x)*px ptx = px**(1/p['T']) # temparature sharpening targets_x = ptx / ptx.sum(dim=1, keepdim=True) # normalize targets_x = targets_x.detach() # mixmatch l = np.random.beta(p['alpha'], p['alpha']) l = max(l, 1-l) all_inputs = torch.cat([inputs_x, inputs_x2, inputs_u, inputs_u2], dim=0) all_targets = torch.cat([targets_x, targets_x, targets_u, targets_u], dim=0) idx = 
torch.randperm(all_inputs.size(0)) input_a, input_b = all_inputs, all_inputs[idx] target_a, target_b = all_targets, all_targets[idx] mixed_input = l * input_a[:batch_size*2] + (1 - l) * input_b[:batch_size*2] mixed_target = l * target_a[:batch_size*2] + (1 - l) * target_b[:batch_size*2] logits = net(mixed_input, forward_pass='dm') Lx = -torch.mean(torch.sum(F.log_softmax(logits, dim=1) * mixed_target, dim=1)) prior = torch.ones(p['num_class'])/p['num_class'] prior = prior.to(device) pred_mean = torch.softmax(logits, dim=1).mean(0) penalty = torch.sum(prior*torch.log(prior/pred_mean)) loss = Lx + penalty # compute gradient and do SGD step optimizer.zero_grad() loss.backward() optimizer.step() final_loss.update(loss.item()) if batch_idx % 25 == 0: progress.display(batch_idx) def train(p,epoch,net,net2,optimizer,trainloader,criterion,device): net.train() net2.eval() #fix one network and train the other labeled_losses = AverageMeter('Labelled Loss', ':.4e') progress = ProgressMeter(len(trainloader), [labeled_losses], prefix="Epoch: [{}]".format(epoch)) for batch_idx, (inputs_x, inputs_x2, labels_x, w_x) in enumerate(trainloader): batch_size = inputs_x.size(0) # Transform label to one-hot labels_x = torch.zeros(batch_size, p['num_classes']).scatter_(1, labels_x.view(-1,1), 1) w_x = w_x.view(-1,1).type(torch.FloatTensor) inputs_x, inputs_x2, labels_x, w_x = inputs_x.to(device), inputs_x2.to(device), labels_x.to(device), w_x.to(device) with torch.no_grad(): # label refinement of labeled samples outputs_x11 = net(inputs_x) outputs_x12 = net(inputs_x2) outputs_x21 = net2(inputs_x) outputs_x22 = net2(inputs_x2) px = (torch.softmax(outputs_x11, dim=1) + torch.softmax(outputs_x12, dim=1) + torch.softmax(outputs_x21, dim=1) + torch.softmax(outputs_x22, dim=1)) / 4 px = w_x*labels_x + (1-w_x)*px ptx = px**(1/p['T']) # temparature sharpening targets_x = ptx / ptx.sum(dim=1, keepdim=True) # normalize targets_x = targets_x.detach() # mixmatch l = np.random.beta(p['alpha'], 
p['alpha']) l = max(l, 1-l) all_inputs = torch.cat([inputs_x, inputs_x2, inputs_x, inputs_x2], dim=0) all_targets = torch.cat([targets_x, targets_x, targets_x, targets_x], dim=0) idx = torch.randperm(all_inputs.size(0)) input_a, input_b = all_inputs, all_inputs[idx] target_a, target_b = all_targets, all_targets[idx] mixed_input = l * input_a + (1 - l) * input_b mixed_target = l * target_a + (1 - l) * target_b logits = net(mixed_input) Lx = criterion(logits, mixed_target) # regularization prior = torch.ones(p['num_classes'])/p['num_classes'] prior = prior.to(device) pred_mean = torch.softmax(logits, dim=1).mean(0) penalty = torch.sum(prior*torch.log(prior/pred_mean)) loss = Lx + penalty # compute gradient and do SGD step optimizer.zero_grad() loss.backward() optimizer.step() labeled_losses.update(Lx.item()) if batch_idx % 25 == 0: progress.display(batch_idx) def scanmix_big_train_proportional(p,epoch,net,net2,optimizer,trainloader,criterion,lambda_u,device): net.train() net2.eval() #fix one network and train the other labeled_losses = AverageMeter('Labelled Loss', ':.4e') unlabeled_losses = AverageMeter('Unlabelled Loss', ':.4e') progress = ProgressMeter(len(trainloader), [labeled_losses, unlabeled_losses], prefix="Epoch: [{}]".format(epoch)) num_iter = (len(trainloader.dataset)//p['batch_size'])+1 for batch_idx, (inputs_x, inputs_x2, labels_x, w_x) in enumerate(trainloader): batch_size = inputs_x.size(0) # Transform label to one-hot labels_x = torch.zeros(batch_size, p['num_class']).scatter_(1, labels_x.view(-1,1), 1) w_x = w_x.view(-1,1).type(torch.FloatTensor) inputs_x, inputs_x2, labels_x, w_x = inputs_x.to(device), inputs_x2.to(device), labels_x.to(device), w_x.to(device) with torch.no_grad(): # label refinement of labeled samples outputs_x11 = net(inputs_x, forward_pass='dm') outputs_x12 = net(inputs_x2, forward_pass='dm') outputs_x21 = net2(inputs_x, forward_pass='dm') outputs_x22 = net2(inputs_x2, forward_pass='dm') px = (torch.softmax(outputs_x11, dim=1) + 
torch.softmax(outputs_x12, dim=1) + torch.softmax(outputs_x21, dim=1) + torch.softmax(outputs_x22, dim=1)) / 4 px = w_x*labels_x + (1-w_x)*px ptx = px**(1/p['T']) # temparature sharpening targets_x = ptx / ptx.sum(dim=1, keepdim=True) # normalize targets_x = targets_x.detach() # mixmatch l = np.random.beta(p['alpha'], p['alpha']) l = max(l, 1-l) all_inputs = torch.cat([inputs_x, inputs_x2, inputs_x, inputs_x2], dim=0) all_targets = torch.cat([targets_x, targets_x, targets_x, targets_x], dim=0) idx = torch.randperm(all_inputs.size(0)) input_a, input_b = all_inputs, all_inputs[idx] target_a, target_b = all_targets, all_targets[idx] mixed_input = l * input_a + (1 - l) * input_b mixed_target = l * target_a + (1 - l) * target_b logits = net(mixed_input, forward_pass='dm') Lx, Lu, lamb = criterion(logits, mixed_target, logits, mixed_target,lambda_u, epoch+batch_idx/num_iter, p['warmup']) # regularization prior = torch.ones(p['num_class'])/p['num_class'] prior = prior.to(device) pred_mean = torch.softmax(logits, dim=1).mean(0) penalty = torch.sum(prior*torch.log(prior/pred_mean)) loss = Lx + lamb * Lu + penalty # compute gradient and do SGD step optimizer.zero_grad() loss.backward() optimizer.step() labeled_losses.update(Lx.item()) unlabeled_losses.update(Lu.item()) if batch_idx % 25 == 0: progress.display(batch_idx) def scanmix_big_train_proportional(p,epoch,net,net2,optimizer,trainloader,criterion,lambda_u,device): net.train() net2.eval() #fix one network and train the other labeled_losses = AverageMeter('Labelled Loss', ':.4e') unlabeled_losses = AverageMeter('Unlabelled Loss', ':.4e') progress = ProgressMeter(len(trainloader), [labeled_losses, unlabeled_losses], prefix="Epoch: [{}]".format(epoch)) # unlabeled_train_iter = iter(unlabeled_trainloader) num_iter = (len(trainloader.dataset)//p['batch_size'])+1 for batch_idx, (inputs_x, inputs_x2, labels_x, w_x) in enumerate(trainloader): # batch_size = inputs_x.size(0) # Transform label to one-hot labels_x = 
torch.zeros(batch_size, p['num_class']).scatter_(1, labels_x.view(-1,1), 1) w_x = w_x.view(-1,1).type(torch.FloatTensor) inputs_x, inputs_x2, labels_x, w_x = inputs_x.to(device), inputs_x2.to(device), labels_x.to(device), w_x.to(device) # inputs_u, inputs_u2 = inputs_u.to(device), inputs_u2.to(device) with torch.no_grad(): # label co-guessing of unlabeled samples # outputs_u11 = net(inputs_u, forward_pass='dm') # outputs_u12 = net(inputs_u2, forward_pass='dm') # outputs_u21 = net2(inputs_u, forward_pass='dm') # outputs_u22 = net2(inputs_u2, forward_pass='dm') # outputs_u11 = net(inputs_u, forward_pass='dm') # pu = (torch.softmax(outputs_u11, dim=1) + torch.softmax(outputs_u12, dim=1) + torch.softmax(outputs_u21, dim=1) + torch.softmax(outputs_u22, dim=1)) / 4 # ptu = pu**(1/p['T']) # temparature sharpening # targets_u = ptu / ptu.sum(dim=1, keepdim=True) # normalize # targets_u = targets_u.detach() # label refinement of labeled samples outputs_x11 = net(inputs_x, forward_pass='dm') outputs_x12 = net(inputs_x2, forward_pass='dm') outputs_x21 = net2(inputs_x, forward_pass='dm') outputs_x22 = net2(inputs_x2, forward_pass='dm') #px = (torch.softmax(outputs_x, dim=1) + torch.softmax(outputs_x2, dim=1)) / 2 px = (torch.softmax(outputs_x11, dim=1) + torch.softmax(outputs_x12, dim=1) + torch.softmax(outputs_x21, dim=1) + torch.softmax(outputs_x22, dim=1)) / 4 px = w_x*labels_x + (1-w_x)*px ptx = px**(1/p['T']) # temparature sharpening targets_x = ptx / ptx.sum(dim=1, keepdim=True) # normalize targets_x = targets_x.detach() # mixmatch l = np.random.beta(p['alpha'], p['alpha']) l = max(l, 1-l) #all_inputs = torch.cat([inputs_x, inputs_x2, inputs_u, inputs_u2], dim=0) all_inputs = torch.cat([inputs_x, inputs_x2, inputs_x, inputs_x2], dim=0) #all_targets = torch.cat([targets_x, targets_x, targets_u, targets_u], dim=0) all_targets = torch.cat([targets_x, targets_x, targets_x, targets_x], dim=0) idx = torch.randperm(all_inputs.size(0)) input_a, input_b = all_inputs, 
all_inputs[idx] target_a, target_b = all_targets, all_targets[idx] mixed_input = l * input_a + (1 - l) * input_b mixed_target = l * target_a + (1 - l) * target_b logits = net(mixed_input, forward_pass='dm') # logits_x = logits[:batch_size*3] # logits_u = logits[batch_size*3:] #Lx, Lu, lamb = criterion(logits_x, mixed_target[:batch_size*3], logits_u, mixed_target[batch_size*3:],lambda_u, epoch+batch_idx/num_iter, p['warmup']) Lx, Lu, lamb = criterion(logits, mixed_target, logits, mixed_target,lambda_u, epoch+batch_idx/num_iter, p['warmup']) # regularization prior = torch.ones(p['num_class'])/p['num_class'] prior = prior.to(device) pred_mean = torch.softmax(logits, dim=1).mean(0) penalty = torch.sum(prior*torch.log(prior/pred_mean)) loss = Lx + lamb * Lu + penalty # compute gradient and do SGD step optimizer.zero_grad() loss.backward() optimizer.step() labeled_losses.update(Lx.item()) unlabeled_losses.update(Lu.item()) if batch_idx % 25 == 0: progress.display(batch_idx) def warmup(epoch,net,optimizer,dataloader,criterion, conf_penalty, noise_mode, device): net.train() losses = AverageMeter('CE-Loss', ':.4e') progress = ProgressMeter(len(dataloader), [losses], prefix="Epoch: [{}]".format(epoch)) num_iter = (len(dataloader.dataset)//dataloader.batch_size)+1 for batch_idx, (inputs, labels, path) in enumerate(dataloader): inputs, labels = inputs.to(device), labels.to(device) optimizer.zero_grad() with torch.no_grad(): input_features = net(inputs, forward_pass='backbone') outputs = net(input_features, forward_pass='head') loss = criterion(outputs, labels) if noise_mode=='asym' or 'semantic' in noise_mode: # penalize confident prediction for asymmetric noise penalty = conf_penalty(outputs) L = loss + penalty elif noise_mode in ['semantic_densenet','semantic_resnet','semantic_vgg']: penalty = conf_penalty(outputs) L = loss + penalty elif noise_mode=='sym': L = loss elif noise_mode in ['1','2','3']: L = loss L.backward() optimizer.step() losses.update(L.item()) if batch_idx 
% 25 == 0: progress.display(batch_idx) def scanmix_big_warmup(p,epoch,net,optimizer,dataloader,criterion, conf_penalty, noise_mode, device): net.train() losses = AverageMeter('CE-Loss', ':.4e') progress = ProgressMeter(len(dataloader), [losses], prefix="Epoch: [{}]".format(epoch)) num_iter = (len(dataloader.dataset)//dataloader.batch_size)+1 for batch_idx, batch in enumerate(dataloader): inputs, labels = batch['image'].to(device), batch['target'].to(device) optimizer.zero_grad() with torch.no_grad(): input_features = net(inputs, forward_pass='backbone') outputs = net(input_features, forward_pass='dm_head') loss = criterion(outputs, labels) if p['dataset'] in ['webvision', 'mini_imagenet_blue', 'mini_imagenet_red', 'mini_imagenet32_red']: L = loss else: raise NotImplementedError() L.backward() optimizer.step() losses.update(L.item()) if batch_idx % 25 == 0: progress.display(batch_idx) def eval_train(args,model,all_loss,epoch,eval_loader,criterion,device, num_classes=100): model.eval() losses = torch.zeros(len(eval_loader.dataset)) pl = torch.zeros(len(eval_loader.dataset)) preds_classes = torch.zeros(len(eval_loader.dataset), num_classes) with torch.no_grad(): for batch_idx, (inputs, targets, index) in enumerate(eval_loader): inputs, targets = inputs.to(device), targets.to(device) outputs = model(inputs) _, predicted = torch.max(outputs, 1) eval_preds = F.softmax(outputs, -1).cpu().data loss = criterion(outputs, targets) for b in range(inputs.size(0)): losses[index[b]]=loss[b] pl[index[b]] = predicted[b] preds_classes[index[b]] = eval_preds[b] losses = (losses-losses.min())/(losses.max()-losses.min()) # normalised losses for each image all_loss.append(losses) if args.r==0.9: # average loss over last 5 epochs to improve convergence stability history = torch.stack(all_loss) input_loss = history[-5:].mean(0) input_loss = input_loss.reshape(-1,1) else: input_loss = losses.reshape(-1,1) # fit a two-component GMM to the loss gmm = 
GaussianMixture(n_components=2,max_iter=10,tol=1e-2,reg_covar=5e-4) gmm.fit(input_loss) prob = gmm.predict_proba(input_loss) prob = prob[:,gmm.means_.argmin()] return prob,all_loss,pl, preds_classes def scanmix_big_eval_train_classes(model, eval_loader,criterion,device, num_classes=100, output={}): model.eval() losses = torch.zeros(len(eval_loader.dataset)) pl = torch.zeros(len(eval_loader.dataset)) preds_classes = torch.zeros(len(eval_loader.dataset), num_classes) with torch.no_grad(): # for batch_idx, (inputs, targets, index) in enumerate(eval_loader): for batch_idx, batch in enumerate(eval_loader): inputs, targets = batch['image'].to(device), batch['target'].to(device) index = batch['meta']['index'] # inputs, targets = inputs.to(device), targets.to(device) outputs = model(inputs, forward_pass='dm') _, predicted = torch.max(outputs, 1) eval_preds = F.softmax(outputs, -1).cpu().data loss = criterion(outputs, targets) for b in range(inputs.size(0)): losses[index[b]]=loss[b] pl[index[b]] = predicted[b] preds_classes[index[b]] = eval_preds[b] losses = (losses-losses.min())/(losses.max()-losses.min()) # normalised losses for each image # all_loss.append(losses) # if p['r'] ==0.9: # average loss over last 5 epochs to improve convergence stability # history = torch.stack(all_loss) # input_loss = history[-5:].mean(0) # input_loss = input_loss.reshape(-1,1) # else: input_loss = losses.reshape(-1,1) # fit a two-component GMM to the loss gmm = GaussianMixture(n_components=2,max_iter=10,tol=1e-2,reg_covar=5e-4) gmm.fit(input_loss) prob = gmm.predict_proba(input_loss) prob = prob[:,gmm.means_.argmin()] output['prob'] = prob output['pl'] = pl output['pred_classes'] = preds_classes # return prob,all_loss,pl, preds_classes def scanmix_big_eval_train(p,args,model,epoch,eval_loader,criterion,device,output): model.eval() losses = torch.zeros(len(eval_loader.dataset)) pl = torch.zeros(len(eval_loader.dataset)) processed = AverageMeter('Eval train') progress = 
ProgressMeter(len(eval_loader), [processed], prefix="Epoch: [{}]".format(epoch)) with torch.no_grad(): for batch_idx, batch in enumerate(eval_loader): inputs, targets = batch['image'].to(device), batch['target'].to(device) index = batch['meta']['index'] outputs = model(inputs, forward_pass='dm') _, predicted = torch.max(outputs, 1) loss = criterion(outputs, targets) for b in range(inputs.size(0)): losses[index[b]]=loss[b] pl[index[b]] = predicted[b] if batch_idx % 25 == 0: progress.display(batch_idx) losses = (losses-losses.min())/(losses.max()-losses.min()) # normalised losses for each image input_loss = losses.reshape(-1,1) # fit a two-component GMM to the loss if (p['dataset'] == 'webvision'): gmm = GaussianMixture(n_components=2,max_iter=10,tol=1e-2,reg_covar=5e-4) else: gmm = GaussianMixture(n_components=2,max_iter=10,tol=1e-2,reg_covar=1e-3) gmm.fit(input_loss) prob = gmm.predict_proba(input_loss) prob = prob[:,gmm.means_.argmin()] output['prob'] = prob output['pl'] = pl def scanmix_scan(train_loader, model, criterion, optimizer, epoch, device, update_cluster_head_only=False): """ Train w/ SCAN-Loss """ total_losses = AverageMeter('Total Loss', ':.4e') consistency_losses = AverageMeter('Consistency Loss', ':.4e') entropy_losses = AverageMeter('Entropy', ':.4e') progress = ProgressMeter(len(train_loader), [total_losses, consistency_losses, entropy_losses], prefix="Epoch: [{}]".format(epoch)) if update_cluster_head_only: model.eval() # No need to update BN else: model.train() # Update BN for i, batch in enumerate(train_loader): # Forward pass anchors = batch['anchor'].to(device, non_blocking=True) neighbors = batch['neighbor'].to(device, non_blocking=True) if update_cluster_head_only: # Only calculate gradient for backprop of linear layer with torch.no_grad(): anchors_features = model(anchors, forward_pass='backbone') neighbors_features = model(neighbors, forward_pass='backbone') anchors_output = model(anchors_features, forward_pass='sl_head') neighbors_output 
= model(neighbors_features, forward_pass='sl_head') else: # Calculate gradient for backprop of complete network anchors_output = model(anchors, forward_pass='sl') neighbors_output = model(neighbors, forward_pass='sl') # Loss for every head total_loss, consistency_loss, entropy_loss = criterion(anchors_output, neighbors_output) # Register the mean loss and backprop the total loss to cover all subheads total_losses.update(total_loss.item()) consistency_losses.update(consistency_loss.item()) entropy_losses.update(entropy_loss.item()) optimizer.zero_grad() total_loss.backward() optimizer.step() if i % 25 == 0: progress.display(i) torch.cuda.empty_cache()
42.071322
173
0.597867
3,564
28,314
4.540685
0.082772
0.027189
0.020083
0.016808
0.880739
0.857319
0.83013
0.811531
0.790892
0.77705
0
0.019636
0.280533
28,314
673
174
42.071322
0.774778
0.121141
0
0.727473
0
0
0.039508
0
0
0
0
0
0
1
0.028571
false
0.072527
0.010989
0
0.041758
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
8
2143d65a0cc104615049333d026c137491ecbec5
147
py
Python
morgana/DatasetTools/morphology/__init__.py
Nikoula86/organoidSegment
b5d00256c15302ccd76b8b7a412852750476504b
[ "MIT" ]
8
2021-09-08T10:49:53.000Z
2022-02-25T13:28:03.000Z
morgana/DatasetTools/morphology/__init__.py
Nikoula86/organoidSegment
b5d00256c15302ccd76b8b7a412852750476504b
[ "MIT" ]
null
null
null
morgana/DatasetTools/morphology/__init__.py
Nikoula86/organoidSegment
b5d00256c15302ccd76b8b7a412852750476504b
[ "MIT" ]
1
2021-11-24T08:10:41.000Z
2021-11-24T08:10:41.000Z
import morgana.DatasetTools.morphology.io import morgana.DatasetTools.morphology.computemorphology import morgana.DatasetTools.morphology.overview
36.75
56
0.897959
15
147
8.8
0.466667
0.295455
0.568182
0.795455
0
0
0
0
0
0
0
0
0.040816
147
3
57
49
0.93617
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
216585048cb8d2d65bf065ec5abaca9ce376d36e
2,056
py
Python
world_manager.py
hamolicious/Ray-Tracer
1af4cab7bccc93e12507c57f56fb2c868e65c745
[ "Apache-2.0" ]
null
null
null
world_manager.py
hamolicious/Ray-Tracer
1af4cab7bccc93e12507c57f56fb2c868e65c745
[ "Apache-2.0" ]
null
null
null
world_manager.py
hamolicious/Ray-Tracer
1af4cab7bccc93e12507c57f56fb2c868e65c745
[ "Apache-2.0" ]
null
null
null
from camera import Camera import pygame def generate_world(size): world = [ [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1], [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1], [1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1], [1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1], [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1], [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1], [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1], [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1], [1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1], [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1], [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1], [1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1], [1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1], [1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1], [1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1], [1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1], [1, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] ] tile_width = size[0] / len(world[0]) tile_depth = size[1] / len(world) temp = [] for y in range(len(world)): for x in range(len(world[0])): if world[y][x] == 1: temp.append(pygame.Rect(x * tile_width, y * tile_depth, tile_width, tile_depth)) elif world[y][x] == 2: avrg_size = (tile_width + tile_depth) / 2 size = avrg_size / 4 player = Camera(370, 165, size) player.heading = 0 return temp, player
39.538462
97
0.355058
484
2,056
1.485537
0.066116
0.667594
0.888734
1.040334
0.554937
0.554937
0.554937
0.554937
0.552156
0.552156
0
0.333601
0.394942
2,056
51
98
40.313725
0.244373
0
0
0.263158
1
0
0
0
0
0
0
0
0
1
0.026316
false
0
0.052632
0
0.105263
0
0
0
1
null
1
1
1
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
0d404bc2e22b1f537b5bf4e38856d930f796833b
16,014
py
Python
ports/esp32/modules/imagedata.py
oshwabadge2018/micropython
1c1584fc37a13a3407da341dd443f6d76644c26d
[ "MIT" ]
3
2018-10-02T02:13:11.000Z
2018-10-08T20:49:56.000Z
ports/esp32/modules/imagedata.py
oshwabadge2018/micropython
1c1584fc37a13a3407da341dd443f6d76644c26d
[ "MIT" ]
1
2018-10-04T06:44:57.000Z
2018-10-04T15:34:46.000Z
ports/esp32/modules/imagedata.py
acamilo/micropython
533d3d0c56ad0e822cb923ef3ff86f09cd5df118
[ "MIT" ]
2
2018-08-29T02:55:32.000Z
2018-09-23T04:44:21.000Z
ohslogo = b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf9\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf1\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe0\xff\xff\xff\xff
\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x06\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\x09\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfc\x09\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf8\x04\x7f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf0\x00\x7f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfc\x03\xff\xe0\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf0\x00\xff\xe0\x00\xff\xff\xff\xff\xff\xff\xf8\x0f\xff\xff\xff\xe0\xf0\x7f\xe0\x01\xff\xff\xff\xff\xff\xff\xe0\x03\xff\xff\xff\xc1\xfc\x3f\xe0\x01\xff\xff\xff\xff\xff\xff\x80\x00\x7f\xff\xff\x87\xfe\x1f\xc0\x03\xff\xff\xff\xff\xff\xfe\x07\xf0\x3f\xff\xff\x0f\xff\x1f\xc0\x07\xff\xff\xff\xff\xff\xfc\x1f\xfe\x0f\xff\xff\x1f\xff\x8f\x80\x07\xff\xff\xff\xff\xff\xf8\x7f\xff\x87\xff\xff\x1f\xff\x8f\xc0\x0f\xff\xff\xff\xff\xff\xf0\xff\xff\xc3\xff\xff\x3f\xff\xc7\x81\x1f\xff\xff\xff\xff\xff\xe1\xff\xff\xe1\xff\xff\x3f\xff\xc7\x81\xff\xff\xff\xff\xff\xff\xc3\xff\xff\xf1\xff\xff\x1f\xff\xc7\x1f\xff\xff\xff\xff\xff\xff\xc7\xff\xff\xf8\xff\xff\x1f\xff\xe2\x3f\xff\xff\xff\xff\xff\xff\x8f\xff\xff\xfc\x7f\xff\x1f\xff\xe0\x7f\xff\xff\xff\xff\xff\xff\x9f\xff\xff\xfc\x7f\xff\x8f\xff\xe0\xff\xff\xff\xff\xff\xff\xff\x1f\xff\xff\xfe\x3f\xff\xc7\xff\xe1\xff\xff\xff\xff\xff\xff\xff\x3f\xff\xff\xff\x3f\xff\xc3\xff\xc1\xff\xff\xff\xff\xff\xff\xfe\x3f\xff\xff\xff\x1f\xff\xe0\xfe\x03\xff\xff\xff\xff\xff\xff\xfe\x3f\xff\xff\xff\x9f\xff\xf8\x00\x11\xff\xff\xff\xff\xff\xff\xfe\x7f\xff\xff\xff\x8f\xff\xfc\x00\x71\xff\xff\xff\xff\xff\xff\xfe\x7f\xff\xff\xff\x8f\xff\xff\xcf\xf1\xff\xff\xff\xff\xff\xff\xfc\x7f\xff\xff\xff\xcf\xff\xff\xff\xf1\xff\xff\xff\xff\xff\xff\xfc\x7f\xff\xff\xff\xc7\xff\xff\xff\xf1\xff\xff\xff\xff\xff\xff\xfc\xff\xff\xff\xff\xc7\xff\xff\xff\xf3\xff\xff\xff\xff\xff\xff\xfc\xff\xff\xff\xff\xe3\xff\xff\xff\xf3\xff\xff\xff\xff\xff\xff\xfc\xff\xff\xff\xff\xe3\xff\xff\xff\xf3\xff\xff\xff\xff\xff\xff\xfc\xff\xff\xff\xff\xe3\xff\xff\xff\xe3\xff\xff\xff\xff\xff\xff\xfc\xff\xff\xff
\xff\xf3\xff\xff\xff\xe3\xff\xff\xff\xff\xff\xff\xfc\xff\xff\xff\xff\xf1\xff\xff\xff\xe3\xff\xff\xff\xff\xff\xff\xfc\xff\xff\xff\xff\xf1\xff\xff\xff\xe7\xff\xff\xff\xff\xff\xff\xfc\xff\xff\xff\xff\xf9\xff\xff\xff\xc7\xff\xff\xff\xff\xff\xff\xfc\xff\xff\xff\xff\xf8\xff\xff\xff\xc7\xff\xff\xff\xff\xff\xff\xfc\x7f\xff\xff\xff\xf8\xff\xff\xff\xcf\xff\xff\xff\xff\xff\xff\xfc\x7f\xff\xff\xff\xfc\xff\xff\xff\x8f\xff\xff\xff\xff\xff\xff\xfc\x7f\xff\xff\xff\xfc\x7f\xff\xff\x8f\xff\xff\xff\xff\xff\xff\xfe\x7f\xff\xff\xff\xfe\x7f\xff\xff\x1f\xff\xff\xff\xff\xff\xff\xfe\x3f\xff\xff\xff\xfe\x3f\xff\xfe\x3f\xff\xff\xff\xff\xff\xff\xfe\x3f\xff\xff\xff\xff\x3f\xff\xfe\x3f\xff\xff\xff\xff\xff\xff\xff\x3f\xff\xff\xff\xff\x1f\xff\xfc\x7f\xff\xff\xff\xff\xff\xff\xff\x1f\xff\xff\xff\xff\x8f\xff\xf8\x7f\xff\xff\xff\xff\xff\xff\xff\x1f\xff\xff\x8f\xff\x87\xff\xe0\xff\xff\xff\xff\xff\xff\xff\xff\x8f\xff\xc0\x00\x0f\xc3\xff\xc1\xff\xff\xff\xff\xff\xff\xff\xff\xc7\xff\x80\x00\x0f\xe0\xff\x07\xff\xff\xff\xff\xff\xff\xff\xff\xc3\xff\x80\x07\xcf\xf8\x00\x0f\xff\xff\xff\xff\xff\xff\xff\xff\xe1\xff\x80\x06\x4f\xfe\x00\x7f\xff\xff\xff\xff\xff\xff\xff\xff\xf0\xfe\x00\x06\x4f\xff\x81\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf8\x3c\x00\x06\x4f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\x00\x00\x06\x4f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x80\x00\x02\x4f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf8\x00\x02\x0f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfc\x00\x00\x0f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\x00\x00\x7f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x80\x03\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc0\x07\xff\xff\xf0\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe7\xff\xff\xff\x80\x00\x1f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\xf0\x0f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfc\x1f\xff\x83\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf8\x7f\xff\xe3\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf8\xff
\xff\xf1\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf1\xff\xff\xf9\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe3\xff\xff\xf9\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc3\xff\xff\xf9\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc7\xff\xff\xf1\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc7\xff\xff\xf1\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x8f\xff\xff\xe1\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x8f\xff\xff\xe3\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x8f\xff\xff\xc7\xff\xff\xff\xff\xff\xf9\xf8\x7f\xff\xff\xff\xff\x9f\xff\xff\x8f\xff\xff\xff\xff\xff\xfc\x00\x07\xff\xff\xff\xff\x1f\xff\xff\x0f\xff\xff\xff\xff\xff\xff\x87\xe1\xff\xff\xff\xff\x1f\xff\xfc\x3f\xff\xff\xff\xff\xff\xff\xe7\xe0\x07\xff\xff\xff\x1f\xff\xf0\x7f\xff\xff\xff\xff\xff\xff\xf1\xe0\x00\x01\xff\xff\x1f\xff\x80\xff\xff\xff\xff\xff\xff\xff\xfc\x07\xc0\x00\x00\x02\x1f\xc0\x03\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc0\x00\x00\x00\x00\x0f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf8\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\x11\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\x3f\xff\xff\xff\xff\xff
\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf8\x00\x3e\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\x00\x00\x00\x07\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf8\x00\x00\x00\x00\x0f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe0\x1f\xff\xfc\x00\x00\x7f\xff\xff\xff\xff\xff\xff\xff\xff\xff\x81\xff\xff\xfc\x7f\xc0\x0f\xff\xff\xff\xff\xff\xff\xff\xff\xff\x0f\xff\xff\xfc\xff\xfe\x00\x3f\xff\xff\xff\xff\xff\xff\xff\xfe\x1f\xff\xff\xf8\xff\xff\x80\x1f\xff\xff\xff\xff\xff\xff\xff\xfc\x7f\xff\xff\xf8\xff\xff\xf0\xc3\xff\xff\xff\xff\xff\xff\xff\xf8\xff\xff\xff\xf1\xff\xff\xfd\xf9\xff\xff\xff\xff\xff\xff\xff\xf1\xff\xff\xff\xf1\xff\xff\xfc\xfd\xff\xff\xff\xff\xff\xff\xff\xf1\xff\xff\xff\xe3\xff\xff\xfe\x3c\xff\xff\xff\xff\xff\xff\xff\xe3\xff\xff\xff\xe3\xff\xff\xff\x00\x7f\xff\xff\xff\xff\xff\xff\xe7\xff\xff\xff\xc7\xff\xff\xff\xc3\x1f\xff\xff\xff\xff\xff\xff\xc7\xff\xff\xff\x8f\xff\xff\xff\xff\x8f\xff\xff\xff\xff\xff\xff\xc7\xff\xff\xff\x0f\xff\xff\xff\xff\xe3\xff\xff\xff\xff\xff\xff\xc7\xff\xff\xfe\x1f\xff\xff\xff\xff\xfb\xff\xff\xff\xff\xff\xff\x8f\xff\xff\xfc\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x8f\xff\xff\xf8\x7f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xcf\xff\xff\xe0\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc7\xff\xff\xc1\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc3\xff\xff\x07\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe1\xff\xf8\x1f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf0\x1f\x80\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf8\x00\x01\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\x00\x0f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe3\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x9f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x07\xff\xff\xff\xff
\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x31\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x1c\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x87\x1f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe7\xc7\xff\xff\xff\xff\xff\xff\xff\xf0\x3f\xff\xff\xff\xff\xff\xf3\xf0\xff\xff\xff\xff\xff\xff\xfc\x00\x00\x7f\xff\xff\xff\xff\xf0\x3e\x3f\xff\xff\xff\xff\xff\xf0\x00\x00\x0f\xff\xff\xe0\x00\x30\x8f\x87\xff\xff\xff\xff\xff\xc0\x1f\xc0\x03\xff\xff\x00\x04\x02\x81\xe0\xff\xff\xff\xff\xff\x03\xff\xfe\x00\xff\xfc\x00\x07\x8f\x80\x3c\x3f\xff\xff\xff\xfe\x0f\xff\xff\xc0\x3f\xf0\x0f\xe0\x1f\xc0\x07\x1f\xff\xff\xff\xf8\x3f\xff\xff\xf8\x0f\xc0\x7f\xfe\x00\x7f\x81\x9f\xff\xff\xff\xf0\xff\xff\xff\xfe\x07\x03\xff\xff\xff\x03\xe0\x3f\xff\xff\xff\xe1\xff\xff\xff\xff\x80\x0f\xff\xff\xff\xf0\x7c\x3f\xff\xff\xff\xc3\xff\xff\xff\xff\xc0\x1f\xff\xff\xff\xfc\x0f\x3f\xff\xff\xff\x87\xff\xff\xff\xff\xf0\x3f\xff\xff\xff\xff\xc1\x3f\xff\xff\xff\x0f\xff\xff\xff\xff\xe0\x3f\xff\xff\xff\xff\xf8\x3f\xff\xff\xfe\x1f\xff\xff\xff\xff\xc0\x1f\xff\xff\xff\xff\xff\xff\xff\xff\xfe\x3f\xff\xff\xff\xff\x87\x0f\xff\xff\xff\xff\xff\xff\xff\xff\xfc\x7f\xff\xff\xff\xff\x0f\x87\xff\xff\xff\xff\xff\xff\xff\xff\xf8\x7f\xff\xff\xff\xfe\x1f\xc3\xff\xff\xff\xff\xff\xff\xff\xff\xf8\xff\xff\xff\xff\xfe\x3f\xe1\xff\xff\xff\xff\xff\xff\xff\xff\xf0\xff\xff\xff\xff\xfc\x7f\xf1\xff\xff\xff\xff\xff\xff\xff\xff\xf1\xff\xff\xff\xff\xf8\x7f\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xe1\xff\xff\xff\xff\xf8\xff\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xe3\xff\xff\xff\xff\xf1\xff\xfc\x7f\xff\xff\xff\xff\xff\xff\xff\xe3\xff\xff\xff\xff\xf1\xff\xfc\x7f\xbf\xff\xff\xff\xff\xff\xff\xc3\xff\xff\xff\xff\xe1\xff\xfe\x7f\x1f\xff\xff\xff\xff\xff\xff\xc7\xff\xff\xff\xff\xe3\xff\xfe\x3e\x47\xff\xff\xff\xff\xff\xff\xc7\xff\xff\xff\xff\xe3\xff\xfe\x3e\x81\xff\xff\xff\xff\xff\xff\xc7\xff\xff\xff\xff\xc3\xff\xfe\x3c\x99\xff\xff\xff\xff\xff\xff\x87\xff\xff\xff\xff\xc3\xff\xfe\x39\x1b\xff\xff\xff\xff\xff\xff\x8f\xff\xff
\xff\xff\xc3\xff\xfe\x3b\x33\xff\xff\xff\xff\xff\xff\x8f\xff\xff\xff\xff\xe3\xff\xfe\x33\x33\xff\xff\xff\xff\xff\xff\x8f\xff\xff\xff\xff\xe3\xff\xfc\x66\x67\xff\xff\xff\xff\xff\xff\x8f\xff\xff\xff\xff\xe3\xff\xfc\x6c\x67\xff\xff\xff\xff\xff\xff\x8f\xff\xff\xff\xff\xe3\xff\xf8\x4c\xcf\xff\xff\xff\xff\xff\xff\x8f\xff\xff\xff\xff\xf1\xff\xf8\x98\xdf\xff\xff\xff\xff\xff\xff\x8f\xff\xff\xff\xff\xf1\xff\xf0\xb0\xdf\xff\xff\xff\xff\xff\xff\x8f\xff\xff\xff\xff\xf8\xff\xe1\x31\x9f\xff\xff\xff\xff\xff\xff\x8f\xff\xff\xff\xff\xfc\x7f\xc3\x63\xbf\xff\xff\xff\xff\xff\xff\x8f\xff\xff\xff\xff\xfe\x0e\x06\xc3\x3f\xff\xff\xff\xff\xff\xff\xc7\xff\xff\xff\xff\xff\x00\x0c\xe7\x7f\xff\xff\xff\xff\xff\xff\xc7\xff\xff\xff\xff\xff\x80\x3d\xf7\x7f\xff\xff\xff\xff\xff\xff\xc7\xff\xff\xff\xff\xff\xfb\xf9\xb6\xff\xff\xff\xff\xff\xff\xff\xc7\xff\xff\xff\xff\xff\xff\xf3\xa6\xff\xff\xff\xff\xff\xff\xff\xe3\xff\xff\xff\xff\xff\xff\xe6\x0c\xff\xff\xff\xff\xff\xff\xff\xe3\xff\xff\xff\xff\xff\xff\xe0\x1d\xff\xff\xff\xff\xff\xff\xff\xe1\xff\xff\xff\xff\xff\xff\xc1\xbd\xff\xff\xff\xff\xff\xff\xff\xf1\xff\xff\xff\xff\xff\xff\x9b\x99\xff\xff\xff\xff\xff\xff\xff\xf8\xff\xff\xff\xff\xff\xff\x33\x9b\xff\xff\xff\xff\xff\xff\xff\xf8\xff\xff\xff\xff\xff\xff\x67\x9b\xff\xff\xff\xff\xff\xff\xff\xfc\x7f\xff\xff\xff\xff\xfe\x0f\xb3\xff\xff\xff\xff\xff\xff\xff\xfe\x3f\xff\xff\xff\xff\xfe\x0f\xb7\xff\xff\xff\xff\xff\xff\xff\xff\x1f\xff\xff\xff\xff\xfc\x3f\xa7\xff\xff\xff\xff\xff\xff\xff\xff\x0f\xff\xff\xff\xff\xf8\x7f\x87\xff\xff\xff\xff\xff\xff\xff\xff\x87\xff\xff\xff\xff\xe0\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc1\xff\xff\xff\xff\xc1\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe0\xff\xff\xff\xff\x83\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf8\x1f\xff\xff\xfe\x0f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\x03\xff\xff\xf0\x1f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x7f\xff\x80\x7f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc0\x03\xf0\x01\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf0\x00\x00\x07\xff
\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\x1f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff
\xff\xff\xff'
8,007
16,013
0.749844
4,002
16,014
3.0005
0.01924
1.403564
1.837775
2.147568
0.94437
0.924384
0.904647
0.874167
0.838941
0.807961
0
0.051402
0.000187
16,014
1
16,014
16,014
0.698582
0
0
0
0
1
0.999126
0.999126
0
1
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
1
1
1
1
1
1
1
1
1
0
0
0
0
1
1
1
0
0
0
0
0
1
1
1
null
1
0
0
0
0
0
0
0
0
0
0
0
0
16
b4ab6f4b520ab0f44fb483f4ac879a3fdc124ace
129
py
Python
mah_nuh_fahl_brah.py
fhebal/readmission-risk-app
ca7cf1664f0a64a8e88e39eaacaa06aaa7118d49
[ "MIT" ]
null
null
null
mah_nuh_fahl_brah.py
fhebal/readmission-risk-app
ca7cf1664f0a64a8e88e39eaacaa06aaa7118d49
[ "MIT" ]
null
null
null
mah_nuh_fahl_brah.py
fhebal/readmission-risk-app
ca7cf1664f0a64a8e88e39eaacaa06aaa7118d49
[ "MIT" ]
null
null
null
print("okay fahn brahm i'ma commit to a separate branch not directly to main, cuz otherwise it'll break if i submit crap code.")
64.5
128
0.767442
25
129
3.96
0.92
0
0
0
0
0
0
0
0
0
0
0
0.170543
129
1
129
129
0.925234
0
0
0
0
1
0.922481
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
1
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
7
b4bf3905816ae0789bef4f8a5b5bf4294575ec88
72
py
Python
bigtiffp/__init__.py
pablo-munoz/bigtiffp
7d1c3595cbff41c202c70bc416c6c5c13f63cf5e
[ "MIT" ]
3
2018-07-24T21:35:37.000Z
2018-10-05T20:42:25.000Z
bigtiffp/__init__.py
pablo-munoz/bigtiffp
7d1c3595cbff41c202c70bc416c6c5c13f63cf5e
[ "MIT" ]
null
null
null
bigtiffp/__init__.py
pablo-munoz/bigtiffp
7d1c3595cbff41c202c70bc416c6c5c13f63cf5e
[ "MIT" ]
null
null
null
from .bigtiffp import BIGTIFF_VERSION from .bigtiffp import is_bigtiff
18
37
0.847222
10
72
5.9
0.6
0.40678
0.610169
0
0
0
0
0
0
0
0
0
0.125
72
3
38
24
0.936508
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
b4cc20e93d6daa8d746e6302da8931dca7f48b59
3,361
py
Python
src/tests/unit/fixtures/audit_remediation/mock_templates.py
kphsugntuedutw/carbon-black-cloud-sdk-python
5277be067223bc3eda0348c57b2a0004fa70f3e9
[ "MIT" ]
24
2020-10-16T22:07:38.000Z
2022-03-24T14:58:03.000Z
src/tests/unit/fixtures/audit_remediation/mock_templates.py
kphsugntuedutw/carbon-black-cloud-sdk-python
5277be067223bc3eda0348c57b2a0004fa70f3e9
[ "MIT" ]
63
2020-10-26T18:26:15.000Z
2022-03-31T17:31:02.000Z
src/tests/unit/fixtures/audit_remediation/mock_templates.py
kphsugntuedutw/carbon-black-cloud-sdk-python
5277be067223bc3eda0348c57b2a0004fa70f3e9
[ "MIT" ]
10
2020-11-09T11:54:23.000Z
2022-03-24T20:44:00.000Z
"""Mock data for Live Query templates""" EXAMPLE_TEMPLATE = { 'id': 'xzllqfvlie2bzghqqfkxk9xizqniwcvr', 'name': 'CBC SDK', 'created_by': 'ABCDE12345', 'create_time': '2020-12-10T23:42:28.359Z', 'update_time': '2020-12-10T23:42:28.359Z', 'notify_on_finish': False, 'device_filter': { 'policy_id': None, 'os': ['WINDOWS'], 'device_id': None, 'deployment_type': None, 'policy_ids': None, 'device_types': ['WINDOWS'], 'device_ids': None }, 'sql': 'SELECT name, VERSION, install_date FROM programs;', 'last_run_create_time': None, 'next_run_time': '2020-12-11T18:30:00.000Z', 'schedule': { 'status': 'ACTIVE', 'recurrence': 'DAILY', 'timezone': 'America/New_York', 'rrule': 'FREQ=DAILY;BYHOUR=13;BYMINUTE=30;BYSECOND=0', 'previous_run_time': None, 'next_run_time': '2020-12-11T18:30:00.000Z', 'cancellation_time': None, 'cancelled_by': None }, 'recommended_query_id': None, 'schema': None, 'destinations': ['LQ'] } EXAMPLE_TEMPLATE_REFRESH = { 'id': 'xzllqfvlie2bzghqqfkxk9xizqniwcvr', 'name': 'CBC SDK', 'created_by': 'ABCDE12345', 'create_time': '2020-12-10T23:42:28.359Z', 'update_time': '2020-12-10T23:42:28.359Z', 'notify_on_finish': False, 'device_filter': { 'policy_id': None, 'os': ['WINDOWS'], 'device_id': None, 'deployment_type': None, 'policy_ids': None, 'device_types': ['WINDOWS'], 'device_ids': None }, 'sql': 'SELECT name, VERSION, install_date FROM programs;', 'last_run_create_time': '2020-12-11T18:30:00.000Z', 'next_run_time': '2020-12-12T18:30:00.000Z', 'schedule': { 'status': 'ACTIVE', 'recurrence': 'DAILY', 'timezone': 'America/New_York', 'rrule': 'FREQ=DAILY;BYHOUR=13;BYMINUTE=30;BYSECOND=0', 'previous_run_time': '2020-12-11T18:30:00.000Z', 'next_run_time': '2020-12-12T18:30:00.000Z', 'cancellation_time': None, 'cancelled_by': None }, 'recommended_query_id': None, 'schema': None, 'destinations': ['LQ'] } EXAMPLE_TEMPLATE_STOPPED = { 'id': 'xzllqfvlie2bzghqqfkxk9xizqniwcvr', 'name': 'CBC SDK', 'created_by': 'ABCDE12345', 'create_time': '2020-12-10T23:42:28.359Z', 
'update_time': '2020-12-10T23:58:48.283Z', 'notify_on_finish': False, 'device_filter': { 'policy_id': None, 'os': ['WINDOWS'], 'device_id': None, 'deployment_type': None, 'policy_ids': None, 'device_types': ['WINDOWS'], 'device_ids': None }, 'sql': 'SELECT name, VERSION, install_date FROM programs;', 'last_run_create_time': None, 'next_run_time': None, 'schedule': { 'status': 'CANCELLED', 'recurrence': 'DAILY', 'timezone': 'America/New_York', 'rrule': 'FREQ=DAILY;BYHOUR=13;BYMINUTE=30;BYSECOND=0', 'previous_run_time': None, 'next_run_time': None, 'cancellation_time': '2020-12-10T23:58:48.283Z', 'cancelled_by': 'ABCDE12345' }, 'recommended_query_id': None, 'schema': None, 'destinations': ['LQ'] } EXAMPLE_TEMPLATE_HISTORY = { "org_key": "TEST", "num_found": 1, "results": [EXAMPLE_TEMPLATE] }
30.554545
63
0.587028
382
3,361
4.926702
0.235602
0.05526
0.069075
0.055792
0.913921
0.913921
0.909671
0.893199
0.893199
0.893199
0
0.102454
0.236239
3,361
109
64
30.834862
0.630697
0.010116
0
0.778846
0
0
0.546823
0.161698
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
2ecd68717a8d59ce386ef431a5bd24143185f515
32,017
py
Python
sifra/modelling/system_topology.py
sheecegardezi/sifra
a7af896159ea7db231e23aeab187b7493887a080
[ "Apache-2.0" ]
null
null
null
sifra/modelling/system_topology.py
sheecegardezi/sifra
a7af896159ea7db231e23aeab187b7493887a080
[ "Apache-2.0" ]
null
null
null
sifra/modelling/system_topology.py
sheecegardezi/sifra
a7af896159ea7db231e23aeab187b7493887a080
[ "Apache-2.0" ]
null
null
null
from __future__ import print_function import os import networkx as nx import re import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt # from networkx.readwrite.json_graph import node_link_data # ----------------------------------------------------------------------------- class SystemTopology(object): orientation = "LR" # Orientation of graph - Graphviz option connector_type = "spline" # Connector appearance - Graphviz option clustering = False # Cluster based on defined `node_cluster` out_file = "system_topology" graph_label = "System Component Topology" def __init__(self, infrastructure, scenario): self.infrastructure = infrastructure self.scenario = scenario self.gviz = "" # placeholder for a pygraphviz agraph self.component_attr = {} # Dict for system comp attributes self.out_dir = "" for comp_id in infrastructure.components.keys(): self.component_attr[comp_id] = \ vars(infrastructure.components[comp_id]) self.graph_label = "System Component Topology" self.out_dir = scenario.output_path if infrastructure.system_class.lower() in \ ['potablewatertreatmentplant', 'pwtp', 'wastewatertreatmentplant', 'wwtp', 'substation']: self.orientation = "TB" self.connector_type = "ortho" self.clustering = True elif infrastructure.system_class.lower() in \ ['powerstation']: self.orientation = "LR" self.connector_type = "ortho" self.clustering = True else: self.orientation = "TB" self.connector_type = "polyline" self.clustering = False # Default drawing program self.drawing_prog = 'dot' # Overwrite default if node locations are defined if hasattr(infrastructure, 'system_meta'): if infrastructure.system_meta['component_location_conf']['value']\ == 'defined': self.drawing_prog = 'neato' def draw_sys_topology(self, viewcontext): if self.infrastructure.system_class.lower() in ['substation']: self.draw_substation_topology(viewcontext) elif self.infrastructure.system_class.lower() in [ "potablewatertreatmentplant", "pwtp", "wastewatertreatmentplant", "wwtp", 
"watertreatmentplant", "wtp"]: self.draw_wtp_topology(viewcontext) else: self.draw_generic_sys_topology(viewcontext) def draw_generic_sys_topology(self, viewcontext): """ Draws the component configuration for a given infrastructure system. :param viewcontext: Option "as-built" indicates topology of system prior to hazard impact. Other options can be added to reflect post-impact system configuration and alternate designs. :return: generates and saves the system topology diagram in the following formats: (graphviz) dot, png, svg. """ # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - # Set up output file names & location if not self.out_dir.strip(): output_path = os.getcwd() else: output_path = self.out_dir # strip away file ext and add our own fname = self.out_file.split(os.extsep)[0] # Orientation of the graph (default is top-to-bottom): if self.orientation.upper() not in ['TB', 'LR', 'RL', 'BT']: self.orientation = 'TB' # `connector_type` refers to the line connector type. Must be one of # the types supported by Graphviz (i.e. 'spline', 'ortho', 'line', # 'polyline', 'curved') if self.connector_type.lower() not in \ ['spline', 'ortho', 'line', 'polyline', 'curved']: self.connector_type = 'ortho' # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - # Draw graph using pygraphviz. Define general node & edge attributes. 
G = self.infrastructure._component_graph.digraph graphml_file = os.path.join(output_path, fname + '.graphml') G.write_graphml(graphml_file) elist = G.get_edgelist() named_elist = [] for tpl in elist: named_elist.append((G.vs[tpl[0]]['name'], G.vs[tpl[1]]['name'])) nxG = nx.DiGraph(named_elist) self.gviz = nx.nx_agraph.to_agraph(nxG) default_node_color = "royalblue3" default_edge_color = "royalblue2" self.gviz.graph_attr.update( concentrate=False, resolution=300, directed=True, labelloc="t", label='< '+self.graph_label+'<BR/><BR/> >', rankdir=self.orientation, #ranksep="1.0 equally", splines=self.connector_type, center="true", forcelabels=True, fontname="Helvetica-Bold", fontcolor="#444444", fontsize=26, smoothing="graph_dist", pad=0.5, nodesep=1.5, sep=1.0, overlap="voronoi", overlap_scaling=1.0, ) self.gviz.node_attr.update( shape="circle", style="rounded,filled", fixedsize="true", width=1.8, height=1.8, xlp="0, 0", color=default_node_color, # gray14 fillcolor="white", fontcolor=default_node_color, # gray14 penwidth=1.5, fontname="Helvetica-Bold", fontsize=18, ) self.gviz.edge_attr.update( arrowhead="normal", arrowsize="1.0", style="bold", color=default_edge_color, # gray12 penwidth=1.2, ) # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - # Customise nodes based on node type or defined clusters for node in self.component_attr.keys(): label_mod = self.segment_long_labels(node, delims=['_', ' ']) self.gviz.get_node(node).attr['label'] = label_mod if str(self.component_attr[node]['node_type']).lower() == 'supply': self.gviz.get_node(node).attr['label'] =\ self.segment_long_labels(node, maxlen=12, delims=['_', ' ']) self.gviz.get_node(node).attr.update( shape="rect", rank="supply", style="rounded,filled", fixedsize="true", color="limegreen", fillcolor="white", fontcolor="limegreen", penwidth=2.0, height=1.2, width=2.2, ) if str(self.component_attr[node]['node_type']).lower() == 'sink': self.gviz.get_node(node).attr.update( shape="doublecircle", 
rank="sink", penwidth=2.0, color="orangered", # royalblue3 fillcolor="white", fontcolor="orangered", # royalblue3 ) if str(self.component_attr[node]['node_type']).lower() \ == 'dependency': self.gviz.get_node(node).attr.update( shape="circle", rank="dependency", penwidth=3.5, color="orchid", fillcolor="white", fontcolor="orchid" ) if str(self.component_attr[node]['node_type']).lower() \ == 'junction': self.gviz.get_node(node).attr.update( shape="point", width=0.5, height=0.5, penwidth=3.5, color=default_node_color, ) # Clustering: whether to create subgraphs based on `node_cluster` # designated for components node_clusters = list(set([self.component_attr[k]['node_cluster'] for k in self.component_attr.keys()])) if self.clustering: for cluster in node_clusters: grp = [k for k in self.component_attr.keys() if self.component_attr[k]['node_cluster'] == cluster] cluster = '_'.join(cluster.split()) if cluster.lower() not in ['none', '']: cluster_name = 'cluster_'+cluster rank = 'same' else: cluster_name = '' rank = '' self.gviz.add_subgraph( nbunch=grp, name=cluster_name, style='invis', label='', clusterrank='local', rank=rank, ) for node in self.component_attr.keys(): pos_x = self.component_attr[node]['longitude'] pos_y = self.component_attr[node]['latitude'] if pos_x and pos_y: node_pos = str(pos_x)+","+str(pos_y)+"!" 
self.gviz.get_node(node).attr.update(pos=node_pos) # self.gviz.layout(prog=self.drawing_prog) if viewcontext == "as-built": self.gviz.write(os.path.join(output_path, fname + '_gv.dot')) self.gviz.draw(os.path.join(output_path, fname + '_dot.png'), format='png', prog='dot', args='-Gdpi=300') self.gviz.draw(os.path.join(output_path, fname + '.png'), format='png', prog=self.drawing_prog, args='-n2') self.gviz.draw(os.path.join(output_path, fname + '.svg'), format='svg', prog=self.drawing_prog) # nx.readwrite.json_graph.node_link_data(self.gviz, # os.path.join(output_path, fname + '.json')) # ========================================================================== def draw_substation_topology(self, viewcontext): """ Draws the component configuration for a substation. :param viewcontext: Option "as-built" indicates topology of system prior to hazard impact. Other options can be added to reflect post-impact system configuration and alternate designs. :return: generates and saves the system topology diagram in the following formats: (graphviz) dot, png, svg. """ # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - # Set up output file names & location if not self.out_dir.strip(): output_path = os.getcwd() else: output_path = self.out_dir # strip away file ext and add our own fname = self.out_file.split(os.extsep)[0] # Orientation of the graph (default is top-to-bottom): self.orientation = 'TB' # `connector_type` refers to the line connector type. 
Must be one of # ['spline', 'ortho', 'line', 'polyline', 'curved'] self.connector_type = 'ortho' self.drawing_prog = 'neato' # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - G = self.infrastructure._component_graph.digraph graphml_file = os.path.join(output_path, fname + '.graphml') G.write_graphml(graphml_file) elist = G.get_edgelist() named_elist = [] for tpl in elist: named_elist.append((G.vs[tpl[0]]['name'], G.vs[tpl[1]]['name'])) nxG = nx.DiGraph(named_elist) self.gviz = nx.nx_agraph.to_agraph(nxG) default_node_color = "royalblue3" default_edge_color = "royalblue2" self.gviz.graph_attr.update( directed=True, concentrate=False, resolution=300, orientation="portrait", labelloc="t", label='< '+self.graph_label+'<BR/><BR/> >', bgcolor="white", rankdir=self.orientation, # ranksep="1.0 equally", splines=self.connector_type, center="true", forcelabels=True, fontname="Helvetica-Bold", fontcolor="#444444", fontsize=26, # smoothing="graph_dist", smoothing="none", pad=0.5, pack=False, sep="+20", # overlap=False, # overlap="voronoi", # overlap_scaling=1.0, ) self.gviz.node_attr.update( shape="circle", style="rounded,filled", fixedsize="true", width=0.2, height=0.2, color=default_node_color, # gray14 fillcolor="white", fontcolor=default_node_color, # gray14 penwidth=1.5, fontname="Helvetica-Bold", fontsize=12, ) self.gviz.edge_attr.update( arrowhead="normal", arrowsize="0.7", style="bold", color=default_edge_color, # gray12 penwidth=1.0, ) # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - # Clustering: whether to create subgraphs based on `node_cluster` # designated for components node_clusters = list(set([self.component_attr[k]['node_cluster'] for k in self.component_attr.keys()])) if self.clustering: for cluster in node_clusters: grp = [k for k in self.component_attr.keys() if self.component_attr[k]['node_cluster'] == cluster] cluster = '_'.join(cluster.split()) if cluster.lower() not in ['none', '']: cluster_name = 
'cluster_'+cluster rank = 'same' else: cluster_name = '' rank = '' self.gviz.add_subgraph( nbunch=grp, name=cluster_name, style='invis', label='', clusterrank='local', rank=rank, ) for node in self.component_attr.keys(): pos_x = self.component_attr[node]['longitude'] pos_y = self.component_attr[node]['latitude'] if pos_x and pos_y: node_pos = str(pos_x)+","+str(pos_y)+"!" self.gviz.get_node(node).attr.update(pos=node_pos) # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - # Customise nodes based on node type or defined clusters for node in self.component_attr.keys(): # label_mod = self.segment_long_labels(node, delims=['_', ' ']) # self.gviz.get_node(node).attr['label'] = label_mod if str(self.component_attr[node]['node_type']).lower() == 'supply': self.gviz.get_node(node).attr['label'] =\ self.segment_long_labels(node, maxlen=10, delims=['_', ' ']) self.gviz.get_node(node).attr.update( shape="rect", rank="supply", style="filled", fixedsize="true", color="limegreen", fillcolor="white", fontcolor="limegreen", peripheries=2, penwidth=1.5, height=0.8, width=1.5, ) if str(self.component_attr[node]['node_type']).lower() == 'sink': self.gviz.get_node(node).attr['label'] =\ self.segment_long_labels(node, maxlen=7, delims=['_', ' ']) self.gviz.get_node(node).attr.update( shape="doublecircle", width=0.9, height=0.9, rank="sink", penwidth=1.5, color="orangered", # royalblue3 fillcolor="white", fontcolor="orangered", # royalblue3 ) if str(self.component_attr[node]['node_type']).lower() \ == 'dependency': self.gviz.get_node(node).attr['label'] =\ self.segment_long_labels(node, maxlen=7, delims=['_', ' ']) self.gviz.get_node(node).attr.update( shape="circle", width=0.9, height=0.9, rank="dependency", penwidth=2.5, color="orchid", fillcolor="white", fontcolor="orchid" ) if str(self.component_attr[node]['node_type']).lower() \ == 'junction': self.gviz.get_node(node).attr.update( shape="point", width=0.2, height=0.2, color="#777777", fillcolor="#777777", ) if 
str(self.component_attr[node]['node_type']).lower() \ == 'transshipment': self.gviz.get_node(node).attr.update( fixedsize="true", label="", xlabel=node, # shape="circle", # style="rounded,filled", # width=0.2, # height=0.2, # penwidth=1.5, # color=default_node_color, ) if str(self.component_attr[node]['component_class']).lower()\ == 'bus': # POSITION MUST BE IN POINTS for this to work # tpos = self.gviz.get_node(node).attr['pos'] # poslist = [int(x.strip("!")) for x in tpos.split(",")] # posnew = str(poslist[0]) + "," + str(poslist[1] + 5) + "!" self.gviz.get_node(node).attr.update( shape="rect", penwidth=1.0, width=1.0, height=0.2, label="", xlabel=node, # xlp=posnew, ) # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - # Draw the graph if viewcontext == "as-built": self.gviz.write(os.path.join(output_path, fname + '_gv.dot')) self.gviz.draw(os.path.join(output_path, fname + '_dot.png'), format='png', prog='dot', args='-Gdpi=300 -Gsize=8.27,11.69\!') self.gviz.draw(os.path.join(output_path, fname + '.png'), format='png', prog=self.drawing_prog, args='-n -Gdpi=300') self.gviz.draw(os.path.join(output_path, fname + '.svg'), format='svg', prog=self.drawing_prog) # ========================================================================== def draw_wtp_topology(self, viewcontext): """ Draws the component configuration for a water treatment plant. :param viewcontext: Option "as-built" indicates topology of system prior to hazard impact. Other options can be added to reflect post-impact system configuration and alternate designs. :return: generates and saves the system topology diagram in the following formats: (graphviz) dot, png, svg. 
""" # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - # Set up output file names & location if not self.out_dir.strip(): output_path = os.getcwd() else: output_path = self.out_dir # strip away file ext and add our own fname = self.out_file.split(os.extsep)[0] # Orientation of the graph (default is top-to-bottom): self.orientation = 'TB' # `connector_type` refers to the line connector type. Must be one of # ['spline', 'ortho', 'line', 'polyline', 'curved'] self.connector_type = 'ortho' self.drawing_prog = 'neato' # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - G = self.infrastructure._component_graph.digraph graphml_file = os.path.join(output_path, fname + '.graphml') G.write_graphml(graphml_file) elist = G.get_edgelist() named_elist = [] for tpl in elist: named_elist.append((G.vs[tpl[0]]['name'], G.vs[tpl[1]]['name'])) nxG = nx.DiGraph(named_elist) self.gviz = nx.nx_agraph.to_agraph(nxG) default_node_color = "royalblue3" default_edge_color = "royalblue2" self.gviz.graph_attr.update( directed=True, concentrate=False, resolution=300, orientation="portrait", labelloc="t", label='< '+self.graph_label+'<BR/><BR/> >', bgcolor="white", rankdir=self.orientation, # ranksep="1.0 equally", splines=self.connector_type, center="true", forcelabels=True, fontname="Helvetica-Bold", fontcolor="#444444", fontsize=26, # smoothing="graph_dist", smoothing="none", pad=0.5, pack=False, sep="+20", ) self.gviz.node_attr.update( shape="circle", style="filled", fixedsize="true", width=0.3, height=0.3, color=default_node_color, # gray14 fillcolor="white", fontcolor=default_node_color, # gray14 penwidth=1.5, fontname="Helvetica-Bold", fontsize=12, ) self.gviz.edge_attr.update( arrowhead="normal", arrowsize="0.7", style="bold", color=default_edge_color, # gray12 penwidth=1.0, ) # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - # Clustering: whether to create subgraphs based on `node_cluster` # designated for components 
node_clusters = list(set([self.component_attr[k]['node_cluster'] for k in self.component_attr.keys()])) if self.clustering: for cluster in node_clusters: grp = [k for k in self.component_attr.keys() if self.component_attr[k]['node_cluster'] == cluster] cluster = '_'.join(cluster.split()) if cluster.lower() not in ['none', '']: cluster_name = 'cluster_'+cluster rank = 'same' else: cluster_name = '' rank = '' self.gviz.add_subgraph( nbunch=grp, name=cluster_name, style='invis', label='', clusterrank='local', rank=rank, ) for node in self.component_attr.keys(): pos_x = self.component_attr[node]['longitude'] pos_y = self.component_attr[node]['latitude'] if pos_x and pos_y: node_pos = str(pos_x)+","+str(pos_y)+"!" self.gviz.get_node(node).attr.update(pos=node_pos) # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - # Customise nodes based on node type or defined clusters for node in self.component_attr.keys(): # label_mod = self.segment_long_labels(node, delims=['_', ' ']) # self.gviz.get_node(node).attr['label'] = label_mod if str(self.component_attr[node]['node_type']).lower() == 'supply': self.gviz.get_node(node).attr['label'] =\ self.segment_long_labels(node, maxlen=10, delims=['_', ' ']) self.gviz.get_node(node).attr.update( shape="ellipse", rank="supply", fixedsize="true", color="limegreen", fillcolor="white", fontcolor="limegreen", penwidth=1.5, width=1.5, height=0.9, ) if str(self.component_attr[node]['node_type']).lower() == 'sink': self.gviz.get_node(node).attr['label'] =\ self.segment_long_labels(node, maxlen=7, delims=['_', ' ']) self.gviz.get_node(node).attr.update( shape="ellipse", rank="sink", color="orangered", # royalblue3 fillcolor="white", fontcolor="orangered", # royalblue3 peripheries=2, penwidth=1.5, width=1.5, height=0.9, ) if str(self.component_attr[node]['node_type']).lower() \ == 'dependency': self.gviz.get_node(node).attr['label'] =\ self.segment_long_labels(node, maxlen=7, delims=['_', ' ']) 
self.gviz.get_node(node).attr.update( shape="circle", width=1.0, height=1.0, rank="dependency", penwidth=2.5, color="orchid", fillcolor="white", fontcolor="orchid" ) if str(self.component_attr[node]['node_type']).lower() \ == 'junction': tmplabel =\ self.segment_long_labels(node, maxlen=8, delims=['_', ' ']) self.gviz.get_node(node).attr.update( shape="point", width=0.3, height=0.3, color="#888888", fillcolor="#888888", fontcolor="#888888", label="", xlabel=tmplabel, ) if str(self.component_attr[node]['node_type']).lower() \ == 'transshipment': self.gviz.get_node(node).attr.update( width=0.3, height=0.3, fixedsize="true", label="", xlabel=node, ) if str(self.component_attr[node]['component_class']).lower() in \ ['large tank', 'sedimentation basin', 'sedimentation basin - large']: self.gviz.get_node(node).attr['label'] =\ self.segment_long_labels(node, maxlen=15, delims=['_', ' ']) self.gviz.get_node(node).attr.update( shape="rect", penwidth=1.0, width=2.5, height=0.9, xlabel="", ) if str(self.component_attr[node]['component_class']).lower() in\ ['small tank', 'sedimentation basin - small', 'chlorination tank']: self.gviz.get_node(node).attr['label'] =\ self.segment_long_labels(node, maxlen=12, delims=['_', ' ']) self.gviz.get_node(node).attr.update( shape="rect", penwidth=1.0, width=1.5, height=0.9, xlabel="", ) if str(self.component_attr[node]['component_class']).lower()\ == 'chemical tank': self.gviz.get_node(node).attr.update( # shape="cylinder", shape="circle", penwidth=1.0, width=0.7, height=0.7, fixedsize="true", label="", xlabel=node, ) if str(self.component_attr[node]['component_class']).lower() in\ ['building', 'small building']: tmplabel =\ self.segment_long_labels(node, maxlen=12, delims=['_', ' ']) self.gviz.get_node(node).attr.update( shape="house", penwidth=2.0, width=1.6, height=0.9, label=tmplabel, xlabel="", ) if str(self.component_attr[node]['component_class']).lower() in\ ['pump', 'pumps']: tmplabel =\ self.segment_long_labels(node, maxlen=12, 
delims=['_', ' ']) self.gviz.get_node(node).attr.update( shape="hexagon", penwidth=1.0, width=0.5, height=0.5, fixedsize="true", label="", xlabel=tmplabel, ) if str(self.component_attr[node]['component_class']).lower() in \ ['switchroom', 'power supply']: self.gviz.get_node(node).attr['label'] =\ self.segment_long_labels(node, maxlen=15, delims=['_', ' ']) self.gviz.get_node(node).attr.update( shape="rect", style="rounded", penwidth=1.0, width=1.6, height=0.9, xlabel="", ) # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - # Draw the graph if viewcontext == "as-built": self.gviz.draw(os.path.join(output_path, fname + '.png'), format='png', prog=self.drawing_prog, args='-n -Gdpi=300') self.gviz.draw(os.path.join(output_path, fname + '.jpg'), format='jpg', prog=self.drawing_prog, args='-n -Gdpi=300') self.gviz.write(os.path.join(output_path, fname + '_gv.dot')) self.gviz.draw(os.path.join(output_path, fname + '_dot.png'), format='png', prog='dot', args='-Gdpi=300 -Gsize=8.27,11.69\!') self.gviz.draw(os.path.join(output_path, fname + '.svg'), format='svg', prog=self.drawing_prog) # ========================================================================== def msplit(self, string, delims): s = string for d in delims: rep = d + '\n' s = rep.join(x for x in s.split(d)) return s def segment_long_labels(self, string, maxlen=7, delims=' '): if (not delims) and (len(string) > maxlen): return "\n".join( re.findall("(?s).{," + str(maxlen) + "}", string))[:-1] elif len(string) > maxlen: return self.msplit(string, delims) else: return string
38.206444
80
0.460349
3,013
32,017
4.753402
0.112512
0.038542
0.055788
0.039799
0.836056
0.813504
0.800028
0.783201
0.768119
0.710236
0
0.017637
0.403192
32,017
837
81
38.252091
0.731892
0.154387
0
0.772293
0
0
0.088133
0.004591
0
0
0
0
0
1
0.011147
false
0
0.009554
0
0.036624
0.001592
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
2c0945e8da0884e9683620dc70d8758bd54ce979
46
py
Python
python/testData/refactoring/move/packageImport/before/src/c.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2018-12-29T09:53:39.000Z
2018-12-29T09:53:42.000Z
python/testData/refactoring/move/packageImport/before/src/c.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/refactoring/move/packageImport/before/src/c.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
from a import f def main(): print(f(-1))
9.2
16
0.565217
9
46
2.888889
0.888889
0
0
0
0
0
0
0
0
0
0
0.029412
0.26087
46
4
17
11.5
0.735294
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
true
0
0.333333
0
0.666667
0.333333
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
7
25ee1390ce0851db91e87f55ce2f07111ea6e7b4
5,320
py
Python
bokeh/tests/test_transform.py
DuCorey/bokeh
a88e24f34d40499a4608d01da8d706123350b4e6
[ "BSD-3-Clause" ]
1
2020-02-06T05:27:53.000Z
2020-02-06T05:27:53.000Z
bokeh/tests/test_transform.py
DuCorey/bokeh
a88e24f34d40499a4608d01da8d706123350b4e6
[ "BSD-3-Clause" ]
null
null
null
bokeh/tests/test_transform.py
DuCorey/bokeh
a88e24f34d40499a4608d01da8d706123350b4e6
[ "BSD-3-Clause" ]
null
null
null
import bokeh.transform as bt from bokeh.models import CategoricalColorMapper, Dodge, FactorRange, Jitter, LinearColorMapper, LogColorMapper def test_transform(): t = bt.transform("foo", "junk") assert t == dict(field="foo", transform="junk") def test_dodge(): t = bt.dodge("foo", 0.5) assert isinstance(t, dict) assert set(t) == {"field", "transform"} assert t['field'] == "foo" assert isinstance(t['transform'], Dodge) assert t['transform'].value == 0.5 assert t['transform'].range is None def test_dodge_with_range(): r = FactorRange("a") t = bt.dodge("foo", 0.5, range=r) assert isinstance(t, dict) assert set(t) == {"field", "transform"} assert t['field'] == "foo" assert isinstance(t['transform'], Dodge) assert t['transform'].value == 0.5 assert t['transform'].range is r assert t['transform'].range.factors == ["a"] def test_jitter_defaults(): t = bt.jitter("foo", width=0.5) assert isinstance(t, dict) assert set(t) == {"field", "transform"} assert t['field'] == "foo" assert isinstance(t['transform'], Jitter) assert t['transform'].width == 0.5 assert t['transform'].mean == 0 assert t['transform'].distribution == "uniform" assert t['transform'].range is None def test_jitter(): t = bt.jitter("foo", width=0.5, mean=0.1, distribution="normal") assert isinstance(t, dict) assert set(t) == {"field", "transform"} assert t['field'] == "foo" assert isinstance(t['transform'], Jitter) assert t['transform'].width == 0.5 assert t['transform'].mean == 0.1 assert t['transform'].distribution == "normal" assert t['transform'].range is None def test_jitter_with_range(): r = FactorRange("a") t = bt.jitter("foo", width=0.5, mean=0.1, range=r) assert isinstance(t, dict) assert set(t) == {"field", "transform"} assert t['field'] == "foo" assert isinstance(t['transform'], Jitter) assert t['transform'].width == 0.5 assert t['transform'].mean == 0.1 assert t['transform'].distribution == "uniform" assert t['transform'].range is r assert t['transform'].range.factors == ["a"] def 
test_factor_cmap_defaults(): t = bt.factor_cmap("foo", ["red", "green"], ["foo", "bar"]) assert isinstance(t, dict) assert set(t) == {"field", "transform"} assert t['field'] == "foo" assert isinstance(t['transform'], CategoricalColorMapper) assert t['transform'].palette == ["red", "green"] assert t['transform'].factors == ["foo", "bar"] assert t['transform'].start == 0 assert t['transform'].end is None assert t['transform'].nan_color == "gray" def test_factor_cmap(): t = bt.factor_cmap("foo", ["red", "green"], ["foo", "bar"], start=1, end=2, nan_color="pink") assert isinstance(t, dict) assert set(t) == {"field", "transform"} assert t['field'] == "foo" assert isinstance(t['transform'], CategoricalColorMapper) assert t['transform'].palette == ["red", "green"] assert t['transform'].factors == ["foo", "bar"] assert t['transform'].start == 1 assert t['transform'].end is 2 assert t['transform'].nan_color == "pink" def test_linear_cmap_defaults(): t = bt.linear_cmap("foo", ["red", "green"], 0, 10) assert isinstance(t, dict) assert set(t) == {"field", "transform"} assert t['field'] == "foo" assert isinstance(t['transform'], LinearColorMapper) assert t['transform'].palette == ["red", "green"] assert t['transform'].low == 0 assert t['transform'].high is 10 assert t['transform'].low_color is None assert t['transform'].high_color is None assert t['transform'].nan_color == "gray" def test_linear_cmap(): t = bt.linear_cmap("foo", ["red", "green"], 0, 10, low_color="orange", high_color="blue", nan_color="pink") assert isinstance(t, dict) assert set(t) == {"field", "transform"} assert t['field'] == "foo" assert isinstance(t['transform'], LinearColorMapper) assert t['transform'].palette == ["red", "green"] assert t['transform'].low == 0 assert t['transform'].high is 10 assert t['transform'].low_color == "orange" assert t['transform'].high_color == "blue" assert t['transform'].nan_color == "pink" def test_log_cmap_defaults(): t = bt.log_cmap("foo", ["red", "green"], 0, 10) assert 
isinstance(t, dict) assert set(t) == {"field", "transform"} assert t['field'] == "foo" assert isinstance(t['transform'], LogColorMapper) assert t['transform'].palette == ["red", "green"] assert t['transform'].low == 0 assert t['transform'].high is 10 assert t['transform'].low_color is None assert t['transform'].high_color is None assert t['transform'].nan_color == "gray" def test_log_cmap(): t = bt.log_cmap("foo", ["red", "green"], 0, 10, low_color="orange", high_color="blue", nan_color="pink") assert isinstance(t, dict) assert set(t) == {"field", "transform"} assert t['field'] == "foo" assert isinstance(t['transform'], LogColorMapper) assert t['transform'].palette == ["red", "green"] assert t['transform'].low == 0 assert t['transform'].high is 10 assert t['transform'].low_color == "orange" assert t['transform'].high_color == "blue" assert t['transform'].nan_color == "pink"
38.273381
111
0.626316
711
5,320
4.611814
0.084388
0.136627
0.253736
0.070448
0.898445
0.886246
0.879841
0.860323
0.84416
0.799329
0
0.013321
0.181579
5,320
138
112
38.550725
0.73978
0
0
0.736
0
0
0.203383
0
0
0
0
0
0.776
1
0.096
false
0
0.016
0
0.112
0
0
0
0
null
0
1
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
9
d3324b4530ce1373d996a651352f0e82a4984aea
495
py
Python
groupingsentences/__init__.py
garyrenapp/GroupingSentences
899cdeb53247f9c9187214a7b490d86ba2bc47ed
[ "Apache-2.0" ]
null
null
null
groupingsentences/__init__.py
garyrenapp/GroupingSentences
899cdeb53247f9c9187214a7b490d86ba2bc47ed
[ "Apache-2.0" ]
null
null
null
groupingsentences/__init__.py
garyrenapp/GroupingSentences
899cdeb53247f9c9187214a7b490d86ba2bc47ed
[ "Apache-2.0" ]
null
null
null
from groupingsentences.ssdistance import get_similarity_val_by_sentences from groupingsentences.ssdistance import get_funcname_by_func_type_id from groupingsentences.fileload import load_cells_from_file from groupingsentences.first import cells_to_groups from groupingsentences.first import save_groups_to_xls from groupingsentences.second import cells_to_xmind from groupingsentences.second import gs_grouping_sentences_to_xmind from groupingsentences.first import gs_grouping_sentences_to_xls
55
72
0.919192
67
495
6.38806
0.38806
0.392523
0.182243
0.224299
0.313084
0
0
0
0
0
0
0
0.064646
495
8
73
61.875
0.924406
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
d365d68d924c2944be150443640bb374c763c68a
41,626
py
Python
tests/timeseries/stationxml/test_channel.py
kujaku11/mt_metadata
92081e77550b737619f6c40c4ecb56e2e4d4d870
[ "MIT" ]
10
2021-01-08T23:38:45.000Z
2022-03-31T14:01:45.000Z
tests/timeseries/stationxml/test_channel.py
kujaku11/mt_metadata
92081e77550b737619f6c40c4ecb56e2e4d4d870
[ "MIT" ]
79
2021-01-09T03:24:30.000Z
2022-03-18T05:28:15.000Z
tests/timeseries/stationxml/test_channel.py
kujaku11/mt_metadata
92081e77550b737619f6c40c4ecb56e2e4d4d870
[ "MIT" ]
1
2022-03-02T01:11:38.000Z
2022-03-02T01:11:38.000Z
# -*- coding: utf-8 -*- """ Created on Sun Feb 21 15:17:41 2021 :copyright: Jared Peacock (jpeacock@usgs.gov) :license: MIT """ # ============================================================================= # Imports # ============================================================================= import unittest from collections import OrderedDict from obspy import read_inventory from mt_metadata.timeseries.stationxml import XMLChannelMTChannel from mt_metadata import STATIONXML_01, STATIONXML_02 from mt_metadata.timeseries.filters.obspy_stages import create_filter_from_stage # ============================================================================= class TestParseSerialID(unittest.TestCase): """ Test parsing a string that holdes the electrod serial ID numbers 'positive: pid, negative: nid' """ def setUp(self): self.converter = XMLChannelMTChannel() self.pid = "2004007" self.nid = "2004008" self.id_str = f"positive: {self.pid}, negative: {self.nid}" self.comma_only_str = f"{self.pid}, {self.nid}" self.generic_str = "basic" def test_parse(self): test_pid, test_nid = self.converter._parse_electrode_ids(self.id_str) self.assertEqual(test_pid, self.pid) self.assertEqual(test_nid, self.nid) def test_pares_comma_only(self): test_pid, test_nid = self.converter._parse_electrode_ids(self.comma_only_str) self.assertEqual(test_pid, self.pid) self.assertEqual(test_nid, self.nid) def test_pares_basic(self): test_pid, test_nid = self.converter._parse_electrode_ids(self.generic_str) self.assertEqual(test_pid, "basic") self.assertEqual(test_nid, "basic") class TestParseDipole(unittest.TestCase): """ Test parsing a dipole length string """ def setUp(self): self.converter = XMLChannelMTChannel() self.dipole_length = 100.0 self.dipole_str = f"{self.dipole_length} meters" def test_parse(self): d = self.converter._parse_dipole_length(self.dipole_str) self.assertEqual(d, self.dipole_length) class TestXMLChannelTwoChannels(unittest.TestCase): """ Test reading XML channel to MT 
Channel """ def setUp(self): self.inventory = read_inventory(STATIONXML_01.as_posix()) self.xml_hy = self.inventory.networks[0].stations[0].channels[0] self.xml_ey = self.inventory.networks[0].stations[0].channels[1] self.filters_dict = dict( [ (c.name, c) for c in [ create_filter_from_stage(s) for s in self.xml_hy.response.response_stages ] ] ) self.converter = XMLChannelMTChannel() self.maxDiff = None def test_channel_hy(self): mt_channel, mt_filters = self.converter.xml_to_mt(self.xml_hy) self.assertDictEqual( mt_channel.to_dict(), { "magnetic": OrderedDict( [ ("channel_number", None), ("comments", "run_ids: []"), ("component", "hy"), ("data_quality.rating.value", 0), ("filter.applied", [False] * 3), ( "filter.name", [ "magnetic field 3 pole butterworth low-pass", "v to counts (magnetic)", "hy time offset", ], ), ("location.elevation", 329.4), ("location.latitude", 37.633351), ("location.longitude", -121.468382), ("measurement_azimuth", 103.2), ("measurement_tilt", 0.0), ("sample_rate", 1.0), ("sensor.id", "2593"), ("sensor.manufacturer", "Barry Narod"), ("sensor.model", "fluxgate NIMS"), ("sensor.type", "Magnetometer"), ("time_period.end", "2020-07-13T21:46:12+00:00"), ("time_period.start", "2020-06-02T18:41:43+00:00"), ("type", "magnetic"), ("units", "nanotesla"), ] ) }, ) def test_channel_ey(self): mt_channel, mt_filters = self.converter.xml_to_mt(self.xml_ey) self.assertDictEqual( mt_channel.to_dict(), { "electric": OrderedDict( [ ("channel_number", None), ("comments", "run_ids: []"), ("component", "ey"), ("data_quality.rating.value", 0), ("dipole_length", 92.0), ("filter.applied", [False] * 6), ( "filter.name", [ "electric field 5 pole butterworth low-pass", "electric field 1 pole butterworth high-pass", "mv per km to v per m", "v per m to v", "v to counts (electric)", "electric time offset", ], ), ("measurement_azimuth", 103.2), ("measurement_tilt", 0.0), ("negative.elevation", 329.4), ("negative.id", "2004020"), ("negative.latitude", 37.633351), 
("negative.longitude", -121.468382), ("negative.manufacturer", "Oregon State University"), ("negative.model", "Pb-PbCl2 kaolin gel Petiau 2 chamber type"), ("negative.type", "electrode"), ("positive.elevation", 329.4), ("positive.id", "200402F"), ("positive.latitude", 37.633351), ("positive.longitude", -121.468382), ("positive.manufacturer", "Oregon State University"), ("positive.model", "Pb-PbCl2 kaolin gel Petiau 2 chamber type"), ("positive.type", "electrode"), ("sample_rate", 1.0), ("time_period.end", "2020-07-13T21:46:12+00:00"), ("time_period.start", "2020-06-02T18:41:43+00:00"), ("type", "electric"), ("units", "millivolts per kilometer"), ] ) }, ) class TestXMLChannelSingleStation(unittest.TestCase): """ Test reading XML channel to MT Channel """ def setUp(self): self.inventory = read_inventory(STATIONXML_02.as_posix()) self.xml_hx = self.inventory.networks[0].stations[0].channels[0] self.xml_hy = self.inventory.networks[0].stations[0].channels[1] self.xml_hz = self.inventory.networks[0].stations[0].channels[2] self.xml_ex = self.inventory.networks[0].stations[0].channels[3] self.xml_ey = self.inventory.networks[0].stations[0].channels[4] self.filters_dict = dict( [ (c.name, c) for c in [ create_filter_from_stage(s) for s in self.xml_hy.response.response_stages ] ] ) self.converter = XMLChannelMTChannel() self.maxDiff = None def test_channel_hx(self): mt_channel, mt_filters = self.converter.xml_to_mt(self.xml_hx) self.assertDictEqual( mt_channel.to_dict(), { "magnetic": OrderedDict( [ ("channel_number", None), ("comments", "run_ids: [a,b]"), ("component", "hx"), ("data_quality.rating.value", 0), ("filter.applied", [False] * 3), ( "filter.name", [ "magnetic field 3 pole butterworth low-pass", "v to counts (magnetic)", "hx time offset", ], ), ("location.elevation", 887.775), ("location.latitude", 35.1469128125), ("location.longitude", -117.160798541667), ("measurement_azimuth", 11.8287420852694), ("measurement_tilt", 0.0), ("sample_rate", 1.0), ("sensor.id", 
"1303-01"), ("sensor.manufacturer", "Barry Narod"), ("sensor.model", "fluxgate NIMS"), ("sensor.type", "Magnetometer"), ("time_period.end", "2020-06-25T17:57:40+00:00"), ("time_period.start", "2020-06-08T22:57:13+00:00"), ("type", "magnetic"), ("units", "nanotesla"), ] ) }, ) def test_channel_hy(self): mt_channel, mt_filters = self.converter.xml_to_mt(self.xml_hy) self.assertDictEqual( mt_channel.to_dict(), { "magnetic": OrderedDict( [ ("channel_number", None), ("comments", "run_ids: [a,b]"), ("component", "hy"), ("data_quality.rating.value", 0), ("filter.applied", [False] * 3), ( "filter.name", [ "magnetic field 3 pole butterworth low-pass", "v to counts (magnetic)", "hy time offset", ], ), ("location.elevation", 887.775), ("location.latitude", 35.1469128125), ("location.longitude", -117.160798541667), ("measurement_azimuth", 101.828742085269), ("measurement_tilt", 0.0), ("sample_rate", 1.0), ("sensor.id", "1303-01"), ("sensor.manufacturer", "Barry Narod"), ("sensor.model", "fluxgate NIMS"), ("sensor.type", "Magnetometer"), ("time_period.end", "2020-06-25T17:57:40+00:00"), ("time_period.start", "2020-06-08T22:57:13+00:00"), ("type", "magnetic"), ("units", "nanotesla"), ] ) }, ) def test_channel_hz(self): mt_channel, mt_filters = self.converter.xml_to_mt(self.xml_hz) self.assertDictEqual( mt_channel.to_dict(), { "magnetic": OrderedDict( [ ("channel_number", None), ("comments", "run_ids: [a,b]"), ("component", "hz"), ("data_quality.rating.value", 0), ("filter.applied", [False] * 3), ( "filter.name", [ "magnetic field 3 pole butterworth low-pass", "v to counts (magnetic)", "hz time offset", ], ), ("location.elevation", 887.775), ("location.latitude", 35.1469128125), ("location.longitude", -117.160798541667), ("measurement_azimuth", 0.0), ("measurement_tilt", 0.0), ("sample_rate", 1.0), ("sensor.id", "1303-01"), ("sensor.manufacturer", "Barry Narod"), ("sensor.model", "fluxgate NIMS"), ("sensor.type", "Magnetometer"), ("time_period.end", "2020-06-25T17:57:40+00:00"), 
("time_period.start", "2020-06-08T22:57:13+00:00"), ("type", "magnetic"), ("units", "nanotesla"), ] ) }, ) def test_channel_ex(self): mt_channel, mt_filters = self.converter.xml_to_mt(self.xml_ex) self.assertDictEqual( mt_channel.to_dict(), { "electric": OrderedDict( [ ("channel_number", None), ("comments", "run_ids: [a,b]"), ("component", "ex"), ("data_quality.rating.value", 0), ("dipole_length", 94.0), ("filter.applied", [False] * 6), ( "filter.name", [ "electric field 5 pole butterworth low-pass", "electric field 1 pole butterworth high-pass", "mv per km to v per m", "v per m to v", "v to counts (electric)", "electric time offset", ], ), ("measurement_azimuth", 11.8287420852694), ("measurement_tilt", 0.0), ("negative.elevation", 887.775), ("negative.id", "2004008"), ("negative.latitude", 35.1469128125), ("negative.longitude", -117.160798541667), ("negative.manufacturer", "Oregon State University"), ("negative.model", "Pb-PbCl2 kaolin gel Petiau 2 chamber type"), ("negative.type", "electrode"), ("positive.elevation", 887.775), ("positive.id", "2004007"), ("positive.latitude", 35.1469128125), ("positive.longitude", -117.160798541667), ("positive.manufacturer", "Oregon State University"), ("positive.model", "Pb-PbCl2 kaolin gel Petiau 2 chamber type"), ("positive.type", "electrode"), ("sample_rate", 1.0), ("time_period.end", "2020-06-25T17:57:40+00:00"), ("time_period.start", "2020-06-08T22:57:13+00:00"), ("type", "electric"), ("units", "millivolts per kilometer"), ] ) }, ) def test_channel_ey(self): mt_channel, mt_filters = self.converter.xml_to_mt(self.xml_ey) self.assertDictEqual( mt_channel.to_dict(), { "electric": OrderedDict( [ ("channel_number", None), ("comments", "run_ids: [a,b]"), ("component", "ey"), ("data_quality.rating.value", 0), ("dipole_length", 94.0), ("filter.applied", [False] * 6), ( "filter.name", [ "electric field 5 pole butterworth low-pass", "electric field 1 pole butterworth high-pass", "mv per km to v per m", "v per m to v", "v to counts 
(electric)", "electric time offset", ], ), ("measurement_azimuth", 101.828742085269), ("measurement_tilt", 0.0), ("negative.elevation", 887.775), ("negative.id", "2004004"), ("negative.latitude", 35.1469128125), ("negative.longitude", -117.160798541667), ("negative.manufacturer", "Oregon State University"), ("negative.model", "Pb-PbCl2 kaolin gel Petiau 2 chamber type"), ("negative.type", "electrode"), ("positive.elevation", 887.775), ("positive.id", "2004002"), ("positive.latitude", 35.1469128125), ("positive.longitude", -117.160798541667), ("positive.manufacturer", "Oregon State University"), ("positive.model", "Pb-PbCl2 kaolin gel Petiau 2 chamber type"), ("positive.type", "electrode"), ("sample_rate", 1.0), ("time_period.end", "2020-06-25T17:57:40+00:00"), ("time_period.start", "2020-06-08T22:57:13+00:00"), ("type", "electric"), ("units", "millivolts per kilometer"), ] ) }, ) class TestMTChannelToXML01HY(unittest.TestCase): """ Test reading network into MT mt_station object """ def setUp(self): self.inventory = read_inventory(STATIONXML_01.as_posix()) self.base_xml_channel = self.inventory.networks[0].stations[0].channels[0] self.converter = XMLChannelMTChannel() self.mt_channel, self.filters_dict = self.converter.xml_to_mt( self.base_xml_channel ) self.test_xml_channel = self.converter.mt_to_xml( self.mt_channel, self.filters_dict ) def test_location(self): self.assertEqual(self.base_xml_channel.latitude, self.test_xml_channel.latitude) self.assertEqual( self.base_xml_channel.longitude, self.test_xml_channel.longitude ) self.assertEqual( self.base_xml_channel.elevation, self.test_xml_channel.elevation ) self.assertEqual(self.base_xml_channel.depth, self.test_xml_channel.depth) def test_time_period(self): self.assertEqual( self.base_xml_channel.start_date, self.test_xml_channel.start_date ) self.assertEqual(self.base_xml_channel.end_date, self.test_xml_channel.end_date) def test_code(self): # the codes are not the same because the azimuth is more than 5 degrees 
from E self.assertEqual(self.base_xml_channel.code, self.test_xml_channel.code) self.assertNotEqual( self.base_xml_channel.alternate_code, self.test_xml_channel.alternate_code ) def test_sensor(self): self.assertEqual( self.base_xml_channel.sensor.type, self.test_xml_channel.sensor.type ) self.assertEqual( self.base_xml_channel.sensor.model, self.test_xml_channel.sensor.model ) self.assertEqual( self.base_xml_channel.sensor.manufacturer, self.test_xml_channel.sensor.manufacturer, ) self.assertEqual( self.base_xml_channel.sensor.serial_number, self.test_xml_channel.sensor.serial_number, ) self.assertEqual( self.base_xml_channel.sensor.description, self.test_xml_channel.sensor.description, ) def test_units(self): self.assertEqual( self.base_xml_channel.calibration_units, self.test_xml_channel.calibration_units, ) self.assertEqual( self.base_xml_channel.calibration_units_description, self.test_xml_channel.calibration_units_description, ) def test_sample_rate(self): self.assertEqual( self.base_xml_channel.sample_rate, self.test_xml_channel.sample_rate ) def test_azimuth(self): self.assertEqual(self.base_xml_channel.azimuth, self.test_xml_channel.azimuth) self.assertEqual(self.base_xml_channel.dip, self.test_xml_channel.dip) def test_types(self): self.assertEqual(self.base_xml_channel.types, self.test_xml_channel.types) class TestMTChannelToXML01EX(unittest.TestCase): """ Test reading network into MT mt_station object """ def setUp(self): self.inventory = read_inventory(STATIONXML_01.as_posix()) self.base_xml_channel = self.inventory.networks[0].stations[0].channels[1] self.converter = XMLChannelMTChannel() self.mt_channel, self.filters_dict = self.converter.xml_to_mt( self.base_xml_channel ) self.test_xml_channel = self.converter.mt_to_xml( self.mt_channel, self.filters_dict ) def test_location(self): self.assertEqual(self.base_xml_channel.latitude, self.test_xml_channel.latitude) self.assertEqual( self.base_xml_channel.longitude, self.test_xml_channel.longitude ) 
self.assertEqual( self.base_xml_channel.elevation, self.test_xml_channel.elevation ) self.assertEqual(self.base_xml_channel.depth, self.test_xml_channel.depth) def test_time_period(self): self.assertEqual( self.base_xml_channel.start_date, self.test_xml_channel.start_date ) self.assertEqual(self.base_xml_channel.end_date, self.test_xml_channel.end_date) def test_code(self): # the codes are not the same because the azimuth is more than 5 degrees from E self.assertNotEqual(self.base_xml_channel.code, self.assertEqual(self.base_xml_channel.code, self.test_xml_channel.code) self.assertNotEqual( self.base_xml_channel.alternate_code, self.test_xml_channel.alternate_code ) def test_sensor(self): self.assertEqual( self.base_xml_channel.sensor.type, self.test_xml_channel.sensor.type ) self.assertEqual( self.base_xml_channel.sensor.model, self.test_xml_channel.sensor.model ) self.assertEqual( self.base_xml_channel.sensor.manufacturer, self.test_xml_channel.sensor.manufacturer, ) self.assertEqual( self.base_xml_channel.sensor.serial_number, self.test_xml_channel.sensor.serial_number, ) self.assertEqual( self.base_xml_channel.sensor.description, self.test_xml_channel.sensor.description, ) def test_units(self): self.assertEqual( self.base_xml_channel.calibration_units, self.test_xml_channel.calibration_units, ) self.assertEqual( self.base_xml_channel.calibration_units_description, self.test_xml_channel.calibration_units_description, ) def test_sample_rate(self): self.assertEqual( self.base_xml_channel.sample_rate, self.test_xml_channel.sample_rate ) def test_azimuth(self): self.assertEqual(self.base_xml_channel.azimuth, self.test_xml_channel.azimuth) self.assertEqual(self.base_xml_channel.dip, self.test_xml_channel.dip) def test_types(self): self.assertEqual(self.base_xml_channel.types, self.test_xml_channel.types) class TestMTChannelToXML02HX(unittest.TestCase): """ Test reading network into MT mt_station object """ def setUp(self): self.inventory = 
read_inventory(STATIONXML_02.as_posix()) self.base_xml_channel = self.inventory.networks[0].stations[0].channels[0] self.converter = XMLChannelMTChannel() self.mt_channel, self.filters_dict = self.converter.xml_to_mt( self.base_xml_channel ) self.test_xml_channel = self.converter.mt_to_xml( self.mt_channel, self.filters_dict ) def test_location(self): self.assertEqual(self.base_xml_channel.latitude, self.test_xml_channel.latitude) self.assertEqual( self.base_xml_channel.longitude, self.test_xml_channel.longitude ) self.assertEqual( self.base_xml_channel.elevation, self.test_xml_channel.elevation ) self.assertEqual(self.base_xml_channel.depth, self.test_xml_channel.depth) def test_time_period(self): self.assertEqual( self.base_xml_channel.start_date, self.test_xml_channel.start_date ) self.assertEqual(self.base_xml_channel.end_date, self.test_xml_channel.end_date) def test_code(self): # the codes are not the same because the azimuth is more than 5 degrees from E self.assertNotEqual(self.base_xml_channel.code, self.assertEqual(self.base_xml_channel.code, self.test_xml_channel.code) self.assertEqual( self.base_xml_channel.alternate_code.lower(), self.test_xml_channel.alternate_code.lower(), ) def test_sensor(self): self.assertEqual( self.base_xml_channel.sensor.type, self.test_xml_channel.sensor.type ) self.assertEqual( self.base_xml_channel.sensor.model, self.test_xml_channel.sensor.model ) self.assertEqual( self.base_xml_channel.sensor.manufacturer, self.test_xml_channel.sensor.manufacturer, ) self.assertEqual( self.base_xml_channel.sensor.serial_number, self.test_xml_channel.sensor.serial_number, ) self.assertEqual( self.base_xml_channel.sensor.description, self.test_xml_channel.sensor.description, ) def test_units(self): self.assertEqual( self.base_xml_channel.calibration_units, self.test_xml_channel.calibration_units, ) self.assertEqual( self.base_xml_channel.calibration_units_description, self.test_xml_channel.calibration_units_description, ) def 
test_sample_rate(self): self.assertEqual( self.base_xml_channel.sample_rate, self.test_xml_channel.sample_rate ) def test_azimuth(self): self.assertEqual(self.base_xml_channel.azimuth, self.test_xml_channel.azimuth) self.assertEqual(self.base_xml_channel.dip, self.test_xml_channel.dip) def test_types(self): self.assertEqual(self.base_xml_channel.types, self.test_xml_channel.types) def test_comments(self): for comment_base, comment_test in zip( self.base_xml_channel.comments, self.test_xml_channel.comments ): self.assertEqual(comment_base.value, comment_test.value) self.assertEqual(comment_base.subject, comment_test.subject) class TestMTChannelToXML02HY(unittest.TestCase): """ Test reading network into MT mt_station object """ def setUp(self): self.inventory = read_inventory(STATIONXML_02.as_posix()) self.base_xml_channel = self.inventory.networks[0].stations[0].channels[1] self.converter = XMLChannelMTChannel() self.mt_channel, self.filters_dict = self.converter.xml_to_mt( self.base_xml_channel ) self.test_xml_channel = self.converter.mt_to_xml( self.mt_channel, self.filters_dict ) def test_location(self): self.assertEqual(self.base_xml_channel.latitude, self.test_xml_channel.latitude) self.assertEqual( self.base_xml_channel.longitude, self.test_xml_channel.longitude ) self.assertEqual( self.base_xml_channel.elevation, self.test_xml_channel.elevation ) self.assertEqual(self.base_xml_channel.depth, self.test_xml_channel.depth) def test_time_period(self): self.assertEqual( self.base_xml_channel.start_date, self.test_xml_channel.start_date ) self.assertEqual(self.base_xml_channel.end_date, self.test_xml_channel.end_date) def test_code(self): # the codes are not the same because the azimuth is more than 5 degrees from E self.assertNotEqual(self.base_xml_channel.code, self.assertEqual(self.base_xml_channel.code, self.test_xml_channel.code) self.assertEqual( self.base_xml_channel.alternate_code.lower(), self.test_xml_channel.alternate_code.lower(), ) def 
test_sensor(self): self.assertEqual( self.base_xml_channel.sensor.type, self.test_xml_channel.sensor.type ) self.assertEqual( self.base_xml_channel.sensor.model, self.test_xml_channel.sensor.model ) self.assertEqual( self.base_xml_channel.sensor.manufacturer, self.test_xml_channel.sensor.manufacturer, ) self.assertEqual( self.base_xml_channel.sensor.serial_number, self.test_xml_channel.sensor.serial_number, ) self.assertEqual( self.base_xml_channel.sensor.description, self.test_xml_channel.sensor.description, ) def test_units(self): self.assertEqual( self.base_xml_channel.calibration_units, self.test_xml_channel.calibration_units, ) self.assertEqual( self.base_xml_channel.calibration_units_description, self.test_xml_channel.calibration_units_description, ) def test_sample_rate(self): self.assertEqual( self.base_xml_channel.sample_rate, self.test_xml_channel.sample_rate ) def test_azimuth(self): self.assertEqual(self.base_xml_channel.azimuth, self.test_xml_channel.azimuth) self.assertEqual(self.base_xml_channel.dip, self.test_xml_channel.dip) def test_types(self): self.assertEqual(self.base_xml_channel.types, self.test_xml_channel.types) def test_comments(self): for comment_base, comment_test in zip( self.base_xml_channel.comments, self.test_xml_channel.comments ): self.assertEqual(comment_base.value, comment_test.value) self.assertEqual(comment_base.subject, comment_test.subject) class TestMTChannelToXML02HZ(unittest.TestCase): """ Test reading network into MT mt_station object """ def setUp(self): self.inventory = read_inventory(STATIONXML_02.as_posix()) self.base_xml_channel = self.inventory.networks[0].stations[0].channels[2] self.converter = XMLChannelMTChannel() self.mt_channel, self.filters_dict = self.converter.xml_to_mt( self.base_xml_channel ) self.test_xml_channel = self.converter.mt_to_xml( self.mt_channel, self.filters_dict ) def test_location(self): self.assertEqual(self.base_xml_channel.latitude, self.test_xml_channel.latitude) self.assertEqual( 
self.base_xml_channel.longitude, self.test_xml_channel.longitude ) self.assertEqual( self.base_xml_channel.elevation, self.test_xml_channel.elevation ) self.assertEqual(self.base_xml_channel.depth, self.test_xml_channel.depth) def test_time_period(self): self.assertEqual( self.base_xml_channel.start_date, self.test_xml_channel.start_date ) self.assertEqual(self.base_xml_channel.end_date, self.test_xml_channel.end_date) def test_code(self): # the codes are not the same because the azimuth is more than 5 degrees from E self.assertNotEqual(self.base_xml_channel.code, self.assertEqual(self.base_xml_channel.code, self.test_xml_channel.code) self.assertEqual( self.base_xml_channel.alternate_code.lower(), self.test_xml_channel.alternate_code.lower(), ) def test_sensor(self): self.assertEqual( self.base_xml_channel.sensor.type, self.test_xml_channel.sensor.type ) self.assertEqual( self.base_xml_channel.sensor.model, self.test_xml_channel.sensor.model ) self.assertEqual( self.base_xml_channel.sensor.manufacturer, self.test_xml_channel.sensor.manufacturer, ) self.assertEqual( self.base_xml_channel.sensor.serial_number, self.test_xml_channel.sensor.serial_number, ) self.assertEqual( self.base_xml_channel.sensor.description, self.test_xml_channel.sensor.description, ) def test_units(self): self.assertEqual( self.base_xml_channel.calibration_units, self.test_xml_channel.calibration_units, ) self.assertEqual( self.base_xml_channel.calibration_units_description, self.test_xml_channel.calibration_units_description, ) def test_sample_rate(self): self.assertEqual( self.base_xml_channel.sample_rate, self.test_xml_channel.sample_rate ) def test_azimuth(self): self.assertEqual(self.base_xml_channel.azimuth, self.test_xml_channel.azimuth) self.assertEqual(self.base_xml_channel.dip, self.test_xml_channel.dip) def test_types(self): self.assertEqual(self.base_xml_channel.types, self.test_xml_channel.types) def test_comments(self): for comment_base, comment_test in zip( 
self.base_xml_channel.comments, self.test_xml_channel.comments ): self.assertEqual(comment_base.value, comment_test.value) self.assertEqual(comment_base.subject, comment_test.subject) class TestMTChannelToXML02EX(unittest.TestCase): """ Test reading network into MT mt_station object """ def setUp(self): self.inventory = read_inventory(STATIONXML_02.as_posix()) self.base_xml_channel = self.inventory.networks[0].stations[0].channels[3] self.converter = XMLChannelMTChannel() self.mt_channel, self.filters_dict = self.converter.xml_to_mt( self.base_xml_channel ) self.test_xml_channel = self.converter.mt_to_xml( self.mt_channel, self.filters_dict ) def test_location(self): self.assertEqual(self.base_xml_channel.latitude, self.test_xml_channel.latitude) self.assertEqual( self.base_xml_channel.longitude, self.test_xml_channel.longitude ) self.assertEqual( self.base_xml_channel.elevation, self.test_xml_channel.elevation ) self.assertEqual(self.base_xml_channel.depth, self.test_xml_channel.depth) def test_time_period(self): self.assertEqual( self.base_xml_channel.start_date, self.test_xml_channel.start_date ) self.assertEqual(self.base_xml_channel.end_date, self.test_xml_channel.end_date) def test_code(self): # the codes are not the same because the azimuth is more than 5 degrees from E self.assertNotEqual(self.base_xml_channel.code, self.assertEqual(self.base_xml_channel.code, self.test_xml_channel.code) self.assertEqual( self.base_xml_channel.alternate_code.lower(), self.test_xml_channel.alternate_code.lower(), ) def test_sensor(self): self.assertEqual( self.base_xml_channel.sensor.type, self.test_xml_channel.sensor.type ) self.assertEqual( self.base_xml_channel.sensor.model, self.test_xml_channel.sensor.model ) self.assertEqual( self.base_xml_channel.sensor.manufacturer, self.test_xml_channel.sensor.manufacturer, ) self.assertEqual( self.base_xml_channel.sensor.serial_number, self.test_xml_channel.sensor.serial_number, ) self.assertEqual( 
self.base_xml_channel.sensor.description, self.test_xml_channel.sensor.description, ) def test_units(self): self.assertEqual( self.base_xml_channel.calibration_units, self.test_xml_channel.calibration_units, ) self.assertEqual( self.base_xml_channel.calibration_units_description, self.test_xml_channel.calibration_units_description, ) def test_sample_rate(self): self.assertEqual( self.base_xml_channel.sample_rate, self.test_xml_channel.sample_rate ) def test_azimuth(self): self.assertEqual(self.base_xml_channel.azimuth, self.test_xml_channel.azimuth) self.assertEqual(self.base_xml_channel.dip, self.test_xml_channel.dip) def test_types(self): self.assertEqual(self.base_xml_channel.types, self.test_xml_channel.types) def test_comments(self): for comment_base, comment_test in zip( self.base_xml_channel.comments, self.test_xml_channel.comments ): self.assertEqual(comment_base.value, comment_test.value) self.assertEqual(comment_base.subject, comment_test.subject) class TestMTChannelToXML02EY(unittest.TestCase): """ Test reading network into MT mt_station object """ def setUp(self): self.inventory = read_inventory(STATIONXML_02.as_posix()) self.base_xml_channel = self.inventory.networks[0].stations[0].channels[4] self.converter = XMLChannelMTChannel() self.mt_channel, self.filters_dict = self.converter.xml_to_mt( self.base_xml_channel ) self.test_xml_channel = self.converter.mt_to_xml( self.mt_channel, self.filters_dict ) def test_location(self): self.assertEqual(self.base_xml_channel.latitude, self.test_xml_channel.latitude) self.assertEqual( self.base_xml_channel.longitude, self.test_xml_channel.longitude ) self.assertEqual( self.base_xml_channel.elevation, self.test_xml_channel.elevation ) self.assertEqual(self.base_xml_channel.depth, self.test_xml_channel.depth) def test_time_period(self): self.assertEqual( self.base_xml_channel.start_date, self.test_xml_channel.start_date ) self.assertEqual(self.base_xml_channel.end_date, self.test_xml_channel.end_date) def 
test_code(self): # the codes are not the same because the azimuth is more than 5 degrees from E self.assertNotEqual(self.base_xml_channel.code, self.assertEqual(self.base_xml_channel.code, self.test_xml_channel.code) self.assertEqual( self.base_xml_channel.alternate_code.lower(), self.test_xml_channel.alternate_code.lower(), ) def test_sensor(self): self.assertEqual( self.base_xml_channel.sensor.type, self.test_xml_channel.sensor.type ) self.assertEqual( self.base_xml_channel.sensor.model, self.test_xml_channel.sensor.model ) self.assertEqual( self.base_xml_channel.sensor.manufacturer, self.test_xml_channel.sensor.manufacturer, ) self.assertEqual( self.base_xml_channel.sensor.serial_number, self.test_xml_channel.sensor.serial_number, ) self.assertEqual( self.base_xml_channel.sensor.description, self.test_xml_channel.sensor.description, ) def test_units(self): self.assertEqual( self.base_xml_channel.calibration_units, self.test_xml_channel.calibration_units, ) self.assertEqual( self.base_xml_channel.calibration_units_description, self.test_xml_channel.calibration_units_description, ) def test_sample_rate(self): self.assertEqual( self.base_xml_channel.sample_rate, self.test_xml_channel.sample_rate ) def test_azimuth(self): self.assertEqual(self.base_xml_channel.azimuth, self.test_xml_channel.azimuth) self.assertEqual(self.base_xml_channel.dip, self.test_xml_channel.dip) def test_types(self): self.assertEqual(self.base_xml_channel.types, self.test_xml_channel.types) def test_comments(self): for comment_base, comment_test in zip( self.base_xml_channel.comments, self.test_xml_channel.comments ): self.assertEqual(comment_base.value, comment_test.value) self.assertEqual(comment_base.subject, comment_test.subject) # ============================================================================= # Run # ============================================================================= if __name__ == "__main__": unittest.main()
39.493359
141
0.562077
4,323
41,626
5.15406
0.058062
0.136888
0.078004
0.127642
0.941699
0.938064
0.937301
0.932992
0.928369
0.918989
0
0.031162
0.330851
41,626
1,053
142
39.530864
0.768759
0.046509
0
0.729977
0
0
0.115069
0.016482
0
0
0
0
0.179634
1
0.094966
false
0.011442
0.006865
0
0.114416
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
6cab67fe92e4ce8d71423f582ba60c58bb9877d2
34,662
py
Python
billforward/apis/tokenization_api.py
billforward/bf-python
d2b812329ca3ed1fd94364d7f46f69ad74665596
[ "Apache-2.0" ]
2
2016-11-23T17:32:37.000Z
2022-02-24T05:13:20.000Z
billforward/apis/tokenization_api.py
billforward/bf-python
d2b812329ca3ed1fd94364d7f46f69ad74665596
[ "Apache-2.0" ]
null
null
null
billforward/apis/tokenization_api.py
billforward/bf-python
d2b812329ca3ed1fd94364d7f46f69ad74665596
[ "Apache-2.0" ]
1
2016-12-30T20:02:48.000Z
2016-12-30T20:02:48.000Z
# coding: utf-8

"""
    BillForward REST API

    OpenAPI spec version: 1.0.0
    Generated by: https://github.com/swagger-api/swagger-codegen.git

    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
    You may obtain a copy of the License at

        http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.
"""

from __future__ import absolute_import

import sys
import os
import re

# python 2 and python 3 compatibility library
from six import iteritems

from ..configuration import Configuration
from ..api_client import ApiClient


class TokenizationApi(object):
    """Client for the BillForward ``/tokenization`` endpoints.

    Every operation comes in two flavours: a convenience wrapper (e.g.
    ``auth_capture``) that returns the deserialized payload, and a
    ``*_with_http_info`` variant that forwards the full call details to
    ``ApiClient.call_api``.  All operations are synchronous by default;
    passing a ``callback`` keyword makes the request asynchronous, in which
    case the request thread is returned instead of the response data.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        # Fall back to the shared client held by the Configuration singleton
        # when no explicit ApiClient is supplied, creating one lazily.
        config = Configuration()
        if api_client:
            self.api_client = api_client
        else:
            if not config.api_client:
                config.api_client = ApiClient()
            self.api_client = config.api_client

    def auth_capture(self, auth_capture_request, **kwargs):
        """Authorized card capture.

        [Note: this API can be invoked more simply by our client-side card
        capture library, BillForward.js; you should not need to interact with
        this API manually unless you have particularly bespoke requirements.]

        :param AuthCaptureRequest auth_capture_request: The auth capture request. (required)
        :param callback function: The callback function for asynchronous request. (optional)
        :return: PaymentMethodPagedMetadata
                 If the method is called asynchronously, returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.auth_capture_with_http_info(auth_capture_request, **kwargs)
        else:
            (data) = self.auth_capture_with_http_info(auth_capture_request, **kwargs)
            return data

    def auth_capture_with_http_info(self, auth_capture_request, **kwargs):
        """Authorized card capture (full HTTP info variant).

        See :meth:`auth_capture` for the operation description.

        :param AuthCaptureRequest auth_capture_request: The auth capture request. (required)
        :param callback function: The callback function for asynchronous request. (optional)
        :return: PaymentMethodPagedMetadata
                 If the method is called asynchronously, returns the request thread.
        """
        all_params = ['auth_capture_request']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        # `params` aliases locals(); kwargs entries are validated against the
        # spec-declared parameter names, then folded into the same dict.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method auth_capture" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'auth_capture_request' is set
        if ('auth_capture_request' not in params) or (params['auth_capture_request'] is None):
            raise ValueError("Missing the required parameter `auth_capture_request` when calling `auth_capture`")

        resource_path = '/tokenization/auth-capture'.replace('{format}', 'json')
        path_params = {}

        query_params = {}

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'auth_capture_request' in params:
            body_params = params['auth_capture_request']

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['text/xml', 'application/xml', 'application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='PaymentMethodPagedMetadata',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def braintree_card_capture(self, **kwargs):
        """Braintree tokenization: capture raw card details into Braintree's vault.

        [Warning: for use only in PCI-compliant environments; contact
        support@billforward.net regarding provisioning of your own on-premise
        BillForward instance.]

        :param BraintreeCaptureRequest body: (optional)
        :param callback function: The callback function for asynchronous request. (optional)
        :return: PaymentMethodPagedMetadata
                 If the method is called asynchronously, returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.braintree_card_capture_with_http_info(**kwargs)
        else:
            (data) = self.braintree_card_capture_with_http_info(**kwargs)
            return data

    def braintree_card_capture_with_http_info(self, **kwargs):
        """Braintree tokenization (full HTTP info variant).

        See :meth:`braintree_card_capture` for the operation description.

        :param BraintreeCaptureRequest body: (optional)
        :param callback function: The callback function for asynchronous request. (optional)
        :return: PaymentMethodPagedMetadata
                 If the method is called asynchronously, returns the request thread.
        """
        all_params = ['body']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method braintree_card_capture" % key
                )
            params[key] = val
        del params['kwargs']

        resource_path = '/tokenization/braintree'.replace('{format}', 'json')
        path_params = {}

        query_params = {}

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='PaymentMethodPagedMetadata',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def pay_vision_shout_v1(self, **kwargs):
        """Generate PayVision iframe redirect.

        [Note: intended to be invoked by the PayVision servers.]  Generates an
        iframe to which the customer is directed on success or failure; the
        iframe's JavaScript messages BillForward.js on the client side.

        :param str _resource_path: (optional)
        :param str id: (optional)
        :param callback function: The callback function for asynchronous request. (optional)
        :return: str
                 If the method is called asynchronously, returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.pay_vision_shout_v1_with_http_info(**kwargs)
        else:
            (data) = self.pay_vision_shout_v1_with_http_info(**kwargs)
            return data

    def pay_vision_shout_v1_with_http_info(self, **kwargs):
        """Generate PayVision iframe redirect (full HTTP info variant).

        See :meth:`pay_vision_shout_v1` for the operation description.

        :param str _resource_path: (optional)
        :param str id: (optional)
        :param callback function: The callback function for asynchronous request. (optional)
        :return: str
                 If the method is called asynchronously, returns the request thread.
        """
        all_params = ['_resource_path', 'id']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method pay_vision_shout_v1" % key
                )
            params[key] = val
        del params['kwargs']

        resource_path = '/tokenization/payvision-shout-v1'.replace('{format}', 'json')
        path_params = {}

        query_params = {}

        header_params = {}

        form_params = []
        local_var_files = {}
        # Parameters are sent url-encoded in the POST body, not as JSON.
        if '_resource_path' in params:
            form_params.append(('resourcePath', params['_resource_path']))
        if 'id' in params:
            form_params.append(('id', params['id']))

        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['text/html'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/x-www-form-urlencoded'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='str',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def pre_auth(self, pre_auth_request, **kwargs):
        """Pre-authorize card capture.

        [Note: this API can be invoked more simply by our client-side card
        capture library, BillForward.js.]

        :param InsertableBillingEntity pre_auth_request: The auth request. (required)
        :param callback function: The callback function for asynchronous request. (optional)
        :return: TokenizationPreAuthPagedMetadata
                 If the method is called asynchronously, returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.pre_auth_with_http_info(pre_auth_request, **kwargs)
        else:
            (data) = self.pre_auth_with_http_info(pre_auth_request, **kwargs)
            return data

    def pre_auth_with_http_info(self, pre_auth_request, **kwargs):
        """Pre-authorize card capture (full HTTP info variant).

        See :meth:`pre_auth` for the operation description.

        :param InsertableBillingEntity pre_auth_request: The auth request. (required)
        :param callback function: The callback function for asynchronous request. (optional)
        :return: TokenizationPreAuthPagedMetadata
                 If the method is called asynchronously, returns the request thread.
        """
        all_params = ['pre_auth_request']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method pre_auth" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'pre_auth_request' is set
        if ('pre_auth_request' not in params) or (params['pre_auth_request'] is None):
            raise ValueError("Missing the required parameter `pre_auth_request` when calling `pre_auth`")

        resource_path = '/tokenization/pre-auth'.replace('{format}', 'json')
        path_params = {}

        query_params = {}

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'pre_auth_request' in params:
            body_params = params['pre_auth_request']

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['text/xml', 'application/xml', 'application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='TokenizationPreAuthPagedMetadata',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def sage_pay_notify_v300(self, **kwargs):
        """Handle SagePay Notification (FORM Protocol v3.0).

        [Note: intended to be invoked by the SagePay servers as the callback
        from a SagePay card capture operation.]

        :param list[str] organizations: Organization-IDs restricting the scope
            of the call; repeat the query parameter for multiple IDs. (optional)
        :param str bill_forward_url_root: The URL through which BFJS connected to BillForward. (optional)
        :param str access_token: The public token through which BFJS connected to BillForward. (optional)
        :param str vps_protocol: (optional)
        :param str tx_type: (optional)
        :param str vendor_tx_code: (optional)
        :param str status: (optional)
        :param str vps_tx_id: (optional)
        :param str card_type: (optional)
        :param str token: (optional)
        :param str status_detail: (optional)
        :param str last4_digits: (optional)
        :param str vps_signature: (optional)
        :param str expiry_date: (optional)
        :param callback function: The callback function for asynchronous request. (optional)
        :return: str
                 If the method is called asynchronously, returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.sage_pay_notify_v300_with_http_info(**kwargs)
        else:
            (data) = self.sage_pay_notify_v300_with_http_info(**kwargs)
            return data

    def sage_pay_notify_v300_with_http_info(self, **kwargs):
        """Handle SagePay Notification (full HTTP info variant).

        See :meth:`sage_pay_notify_v300` for the operation and parameter
        descriptions.

        :return: str
                 If the method is called asynchronously, returns the request thread.
        """
        all_params = ['organizations', 'bill_forward_url_root', 'access_token', 'vps_protocol', 'tx_type', 'vendor_tx_code', 'status', 'vps_tx_id', 'card_type', 'token', 'status_detail', 'last4_digits', 'vps_signature', 'expiry_date']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method sage_pay_notify_v300" % key
                )
            params[key] = val
        del params['kwargs']

        resource_path = '/tokenization/sagepay-notify-v3-00'.replace('{format}', 'json')
        path_params = {}

        query_params = {}
        if 'organizations' in params:
            query_params['organizations'] = params['organizations']
        if 'bill_forward_url_root' in params:
            query_params['billForwardURLRoot'] = params['bill_forward_url_root']
        if 'access_token' in params:
            query_params['access_token'] = params['access_token']

        header_params = {}

        form_params = []
        local_var_files = {}
        # SagePay posts its fields url-encoded; snake_case kwargs map onto the
        # SagePay FORM-protocol field names below.
        if 'vps_protocol' in params:
            form_params.append(('VPSProtocol', params['vps_protocol']))
        if 'tx_type' in params:
            form_params.append(('TxType', params['tx_type']))
        if 'vendor_tx_code' in params:
            form_params.append(('VendorTxCode', params['vendor_tx_code']))
        if 'status' in params:
            form_params.append(('Status', params['status']))
        if 'vps_tx_id' in params:
            form_params.append(('VPSTxId', params['vps_tx_id']))
        if 'card_type' in params:
            form_params.append(('CardType', params['card_type']))
        if 'token' in params:
            form_params.append(('Token', params['token']))
        if 'status_detail' in params:
            form_params.append(('StatusDetail', params['status_detail']))
        if 'last4_digits' in params:
            form_params.append(('Last4Digits', params['last4_digits']))
        if 'vps_signature' in params:
            form_params.append(('VPSSignature', params['vps_signature']))
        if 'expiry_date' in params:
            form_params.append(('ExpiryDate', params['expiry_date']))

        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['text/plain'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/x-www-form-urlencoded'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='str',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def sage_pay_shout_v300(self, **kwargs):
        """Generate SagePay iframe redirect (FORM Protocol v3.0).

        [Note: intended to be invoked by the SagePay servers.]  Generates an
        iframe to which the customer is directed on success or failure; the
        iframe's JavaScript messages BillForward.js on the client side.

        :param list[str] organizations: Organization-IDs restricting the scope
            of the call; repeat the query parameter for multiple IDs. (optional)
        :param str s: (optional)
        :param str t: (optional)
        :param str c: (optional)
        :param str e: (optional)
        :param str l: (optional)
        :param str d: (optional)
        :param callback function: The callback function for asynchronous request. (optional)
        :return: str
                 If the method is called asynchronously, returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.sage_pay_shout_v300_with_http_info(**kwargs)
        else:
            (data) = self.sage_pay_shout_v300_with_http_info(**kwargs)
            return data

    def sage_pay_shout_v300_with_http_info(self, **kwargs):
        """Generate SagePay iframe redirect (full HTTP info variant).

        See :meth:`sage_pay_shout_v300` for the operation and parameter
        descriptions.

        :return: str
                 If the method is called asynchronously, returns the request thread.
        """
        all_params = ['organizations', 's', 't', 'c', 'e', 'l', 'd']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method sage_pay_shout_v300" % key
                )
            params[key] = val
        del params['kwargs']

        resource_path = '/tokenization/sagepay-shout-v3-00'.replace('{format}', 'json')
        path_params = {}

        query_params = {}
        if 'organizations' in params:
            query_params['organizations'] = params['organizations']
        if 's' in params:
            query_params['s'] = params['s']
        if 't' in params:
            query_params['t'] = params['t']
        if 'c' in params:
            query_params['c'] = params['c']
        if 'e' in params:
            query_params['e'] = params['e']
        if 'l' in params:
            query_params['l'] = params['l']
        if 'd' in params:
            query_params['d'] = params['d']

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['text/html'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['text/plain'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='str',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))
46.58871
426
0.601494
3,788
34,662
5.316262
0.089229
0.047671
0.019863
0.021452
0.892244
0.87243
0.856639
0.849439
0.841643
0.831513
0
0.004846
0.309474
34,662
743
427
46.651413
0.836516
0.439155
0
0.649123
1
0
0.167865
0.047738
0
0
0
0
0
1
0.038012
false
0
0.020468
0
0.114035
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
9f5b88a409389f54b1bf1a2665c5c7f3559d70f5
248
py
Python
tests/test_launch_ssh.py
mushonnip/colab-ssh
a7ec5877e486da1cdf07e38294e450e64b0bf81e
[ "MIT" ]
623
2020-06-22T10:47:07.000Z
2022-03-31T15:23:08.000Z
tests/test_launch_ssh.py
mushonnip/colab-ssh
a7ec5877e486da1cdf07e38294e450e64b0bf81e
[ "MIT" ]
63
2020-07-16T16:15:03.000Z
2022-03-29T22:54:46.000Z
tests/test_launch_ssh.py
mushonnip/colab-ssh
a7ec5877e486da1cdf07e38294e450e64b0bf81e
[ "MIT" ]
135
2020-06-29T18:13:31.000Z
2022-03-25T10:41:48.000Z
"""Smoke tests for ``launch_ssh_cloudflared``.

Each test simply invokes the launcher; with no assertions, pytest passes
the test when no exception is raised.
"""
from colab_ssh import launch_ssh_cloudflared


def test_success():
    # Launch the tunnel with an explicit password, verbose output, and
    # killing of any stale tunnel processes enabled.
    launch_ssh_cloudflared("123456", verbose=True, kill_other_processes=True)


def test_success_without_password():
    # Same launch path, but with no password argument supplied.
    launch_ssh_cloudflared(verbose=True, kill_other_processes=True)
24.8
75
0.83871
34
248
5.676471
0.5
0.139896
0.310881
0.207254
0.341969
0.341969
0
0
0
0
0
0.026432
0.084677
248
9
76
27.555556
0.823789
0
0
0
0
0
0.024194
0
0
0
0
0
0
1
0.4
true
0.2
0.2
0
0.6
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
1
0
0
1
0
0
7
9f6f1a7aa710ba99fdb408d63954260717f62124
120
py
Python
integration/tests_ok/ignore_asserts.py
jleverenz/hurl
b81ca8ab7e0e409ec0c074fd8e118721ff4d3fb3
[ "Apache-2.0" ]
null
null
null
integration/tests_ok/ignore_asserts.py
jleverenz/hurl
b81ca8ab7e0e409ec0c074fd8e118721ff4d3fb3
[ "Apache-2.0" ]
null
null
null
integration/tests_ok/ignore_asserts.py
jleverenz/hurl
b81ca8ab7e0e409ec0c074fd8e118721ff4d3fb3
[ "Apache-2.0" ]
null
null
null
from app import app
from flask import Response


@app.route("/ignore_asserts")
def ignore_asserts():
    """Return a fixed greeting for the ``/ignore_asserts`` test endpoint."""
    greeting = "Hello"
    return greeting
15
29
0.741667
17
120
5.117647
0.647059
0.298851
0
0
0
0
0
0
0
0
0
0
0.158333
120
7
30
17.142857
0.861386
0
0
0
0
0
0.166667
0
0
0
0
0
0.4
1
0.2
true
0
0.4
0.2
0.8
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
1
1
0
0
7
9faa50fff92905c1d31a80f9ec06eed3db2282a7
4,148
py
Python
osfclient/tests/test_mkdir.py
RCOSDP/rdmclient
72a4e81899a2b093c8924951d4f9bf21c29b9bfa
[ "BSD-3-Clause" ]
null
null
null
osfclient/tests/test_mkdir.py
RCOSDP/rdmclient
72a4e81899a2b093c8924951d4f9bf21c29b9bfa
[ "BSD-3-Clause" ]
4
2019-05-10T01:48:21.000Z
2020-01-16T22:55:36.000Z
osfclient/tests/test_mkdir.py
yacchin1205/rdmclient
96bf8397a246a5d4f24435db3581178d7ce56041
[ "BSD-3-Clause" ]
1
2019-05-10T01:33:32.000Z
2019-05-10T01:33:32.000Z
"""Test `osf remove` command""" import pytest from mock import call from mock import patch from osfclient import OSF from osfclient.cli import makefolder from osfclient.tests.mocks import MockArgs from osfclient.tests.mocks import MockProject def test_anonymous_doesnt_work(): args = MockArgs(project='1234') def simple_getenv(key): return None with pytest.raises(SystemExit) as e: with patch('osfclient.cli.os.getenv', side_effect=simple_getenv) as mock_getenv: makefolder(args) expected = 'create a folder you need to provide a username and password' assert expected in e.value.args[0] @patch.object(OSF, 'project', return_value=MockProject('1234')) def test_make_sub_folder(OSF_project): args = MockArgs(project='1234', username='joe', target='osfstorage/a/new') def simple_getenv(key): if key == 'OSF_PASSWORD': return 'secret' with patch('osfclient.cli.os.getenv', side_effect=simple_getenv): makefolder(args) OSF_project.assert_called_once_with('1234') MockProject = OSF_project.return_value MockStorage = MockProject._storage_mock.return_value for f in MockStorage.folders: if f._path_mock.return_value == '/a': assert call.create_folder('new') in f.mock_calls @patch.object(OSF, 'project', return_value=MockProject('1234')) def test_make_root_folder(OSF_project): args = MockArgs(project='1234', username='joe', target='osfstorage/new') def simple_getenv(key): if key == 'OSF_PASSWORD': return 'secret' with patch('osfclient.cli.os.getenv', side_effect=simple_getenv): makefolder(args) OSF_project.assert_called_once_with('1234') MockProject = OSF_project.return_value MockStorage = MockProject._storage_mock.return_value assert call.create_folder('new') in MockStorage.mock_calls @patch.object(OSF, 'project', return_value=MockProject('1234')) def test_make_recursive_sub_folder(OSF_project): args = MockArgs(project='1234', username='joe', target='osfstorage/a/new1/new2') def simple_getenv(key): if key == 'OSF_PASSWORD': return 'secret' with patch('osfclient.cli.os.getenv', 
side_effect=simple_getenv): makefolder(args) OSF_project.assert_called_once_with('1234') MockProject = OSF_project.return_value MockStorage = MockProject._storage_mock.return_value for f in MockStorage.folders: if f._path_mock.return_value == '/a': assert call.create_folder('new1') in f.mock_calls assert call.create_folder().create_folder('new2') in f.mock_calls @patch.object(OSF, 'project', return_value=MockProject('1234')) def test_make_recursive_root_folder(OSF_project): args = MockArgs(project='1234', username='joe', target='osfstorage/new1/new2') def simple_getenv(key): if key == 'OSF_PASSWORD': return 'secret' with patch('osfclient.cli.os.getenv', side_effect=simple_getenv): makefolder(args) OSF_project.assert_called_once_with('1234') MockProject = OSF_project.return_value MockStorage = MockProject._storage_mock.return_value assert call.create_folder('new1') in MockStorage.mock_calls assert call.create_folder('new1') in MockStorage.mock_calls @patch.object(OSF, 'project', return_value=MockProject('1234')) def test_wrong_storage_name(OSF_project): args = MockArgs(project='1234', username='joe', target='DOESNTEXIST/a/a/a') def simple_getenv(key): if key == 'OSF_PASSWORD': return 'secret' with patch('osfclient.cli.os.getenv', side_effect=simple_getenv): makefolder(args) OSF_project.assert_called_once_with('1234') # the mock storage is called osfstorage, so we should not call remove() MockProject = OSF_project.return_value MockStorage = MockProject._storage_mock.return_value for f in MockStorage.files: if f._path_mock.return_value == '/a/a/a': assert call.remove() not in f.mock_calls for f in MockStorage.folders: assert call.remove() not in f.mock_calls
32.40625
84
0.706606
545
4,148
5.152294
0.154128
0.071225
0.05698
0.074786
0.833689
0.804487
0.789174
0.780983
0.758903
0.722578
0
0.021566
0.183944
4,148
127
85
32.661417
0.807976
0.023144
0
0.643678
0
0
0.129016
0.039545
0
0
0
0
0.16092
1
0.137931
false
0.068966
0.08046
0.011494
0.287356
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
7
9fb13e1200ac13dff1eea2d04b2b619b158f394a
218
py
Python
Desafios/Desafio21.py
Punkays/Phyton3-Estudos
047ef62ddaf506fe3f653de3a1b2999874bbf12f
[ "Unlicense" ]
null
null
null
Desafios/Desafio21.py
Punkays/Phyton3-Estudos
047ef62ddaf506fe3f653de3a1b2999874bbf12f
[ "Unlicense" ]
null
null
null
Desafios/Desafio21.py
Punkays/Phyton3-Estudos
047ef62ddaf506fe3f653de3a1b2999874bbf12f
[ "Unlicense" ]
null
null
null
# ! Desafio 21 # ! Faça um programa que abra e reproduza um arquivo MP3 import playsound playsound.playsound('/home/michael/Downloads/mx-sound-theme-fresh-and-clean/Fresh_and_Clean/stereo/button-pressed.ogg', True)
31.142857
125
0.784404
33
218
5.121212
0.818182
0.213018
0.153846
0
0
0
0
0
0
0
0
0.015385
0.105505
218
7
125
31.142857
0.851282
0.307339
0
0
0
0.5
0.648649
0.648649
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
1
1
1
null
0
0
0
0
0
0
1
0
1
0
0
0
0
7
9fc5ccb99ed09f44a228875b306c487ac5883631
2,332
py
Python
tests/test_session_manager.py
barseghyanartur/sqlalchemy-unchained
048b44f619f5b09a4bb133aee132d42e0fe06d4d
[ "MIT" ]
6
2020-03-02T15:25:00.000Z
2021-07-15T22:08:31.000Z
tests/test_session_manager.py
barseghyanartur/sqlalchemy-unchained
048b44f619f5b09a4bb133aee132d42e0fe06d4d
[ "MIT" ]
null
null
null
tests/test_session_manager.py
barseghyanartur/sqlalchemy-unchained
048b44f619f5b09a4bb133aee132d42e0fe06d4d
[ "MIT" ]
1
2021-07-15T22:08:33.000Z
2021-07-15T22:08:33.000Z
import pytest from sqlalchemy_unchained import SessionManager @pytest.fixture() def session_manager(db): yield SessionManager() class TestSessionManager: def test_it_requires_a_session_factory(self): SessionManager.set_session_factory(None) with pytest.raises(Exception) as e: fail = SessionManager() assert 'SessionManager was not properly initialized.' in str(e.value) def test_save_without_commit(self, db, session_manager: SessionManager): class Foobar(db.Model): pass db.Model.metadata.create_all() foo = Foobar() session_manager.save(foo) assert foo in session_manager.session with session_manager.no_autoflush: assert not session_manager.query(Foobar).all() def test_save_with_commit(self, db, session_manager: SessionManager): class Foobar(db.Model): pass db.Model.metadata.create_all() foo = Foobar() session_manager.save(foo, commit=True) assert foo in session_manager.session assert session_manager.query(Foobar).all() == [foo] def test_save_all_without_commit(self, db, session_manager: SessionManager): class Foobar(db.Model): pass db.Model.metadata.create_all() foo1 = Foobar() foo2 = Foobar() session_manager.save_all([foo1, foo2]) assert foo1 in session_manager.session assert foo2 in session_manager.session with session_manager.no_autoflush: assert not session_manager.query(Foobar).all() def test_save_all_with_commit(self, db, session_manager: SessionManager): class Foobar(db.Model): pass db.Model.metadata.create_all() foo1 = Foobar() foo2 = Foobar() session_manager.save_all([foo1, foo2]) assert foo1 in session_manager.session assert foo2 in session_manager.session assert session_manager.query(Foobar).all() == [foo1, foo2] def test_commit(self, db, session_manager: SessionManager): class Foobar(db.Model): pass db.Model.metadata.create_all() foo = Foobar() session_manager.save(foo) session_manager.commit() assert session_manager.query(Foobar).all() == [foo]
28.096386
80
0.656089
273
2,332
5.395604
0.190476
0.23761
0.065173
0.093686
0.745418
0.745418
0.733198
0.706042
0.706042
0.706042
0
0.008074
0.256432
2,332
82
81
28.439024
0.841407
0
0
0.666667
0
0
0.018868
0
0
0
0
0
0.210526
1
0.122807
false
0.087719
0.035088
0
0.263158
0
0
0
0
null
1
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
7
4cd9994cb3247f980f18d065d7c3797be2f2df25
4,663
py
Python
old/obssheet.py
PracticalAstronomyCrew/data-pipeline
336b17b6b610ef845be944c60e288c2ba77db3e4
[ "MIT" ]
null
null
null
old/obssheet.py
PracticalAstronomyCrew/data-pipeline
336b17b6b610ef845be944c60e288c2ba77db3e4
[ "MIT" ]
null
null
null
old/obssheet.py
PracticalAstronomyCrew/data-pipeline
336b17b6b610ef845be944c60e288c2ba77db3e4
[ "MIT" ]
null
null
null
import os import sys import pyfits root='/net/vega/data/users/observatory/images/'+str(sys.argv[1])+'/' f = open("OBSSHEET_"+str(sys.argv[1])+".txt","w+") f.write('BLAAUW OBSERVATORY OBSSHEET FOR '+str(sys.argv[1])+'\n') f.write('--------------------------------------------------------------------------------------------'+'\n') f.write('STL-6303E IMAGER ('+str(len(os.listdir(root+'/STL-6303E/i/')))+' frames)\n') f.write('--------------------------------------------------------------------------------------------'+'\n') f.write("{:28s} {:24s} {:15s} {:6s} {:5s} {:4s} {:4s}".format('FILENAME','DATE-OBS','IMAGETYP','FILTER','EXPT','XBIN','YBIN')+'\n') f.write('--------------------------------------------------------------------------------------------'+'\n') for filename in sorted(os.listdir(root+'/STL-6303E/i/')): if filename.endswith(".FIT") or filename.endswith(".fits"): hdu = pyfits.open(root+'/STL-6303E/i/'+filename) f.write("{:28s} {:24s} {:15s} {:6s} {:5.1f} {:4d} {:4d}".format(filename,hdu[0].header['DATE-OBS'],hdu[0].header['IMAGETYP'],hdu[0].header['FILTER'],hdu[0].header['EXPTIME'],hdu[0].header['CCDXBIN'],hdu[0].header['CCDYBIN'])+'\n') continue else: continue f.write('--------------------------------------------------------------------------------------------'+'\n') f.write('--------------------------------------------------------------------------------------------'+'\n') f.write('STL-6303E GUIDER ('+str(len(os.listdir(root+'/STL-6303E/g/')))+' frames)\n') f.write('--------------------------------------------------------------------------------------------'+'\n') f.write("{:28s} {:24s} {:15s} {:6s} {:5s} {:4s} {:4s}".format('FILENAME','DATE-OBS','IMAGETYP','FILTER','EXPT','XBIN','YBIN')+'\n') f.write('--------------------------------------------------------------------------------------------'+'\n') for filename in sorted(os.listdir(root+'/STL-6303E/g/')): if filename.endswith(".FIT") or filename.endswith(".fits"): hdu = 
pyfits.open(root+'/STL-6303E/g/'+filename) f.write("{:28s} {:24s} {:15s} {:6s} {:5.1f} {:4d} {:4d}".format(filename,hdu[0].header['DATE-OBS'],hdu[0].header['IMAGETYP'],hdu[0].header['FILTER'],hdu[0].header['EXPTIME'],hdu[0].header['CCDXBIN'],hdu[0].header['CCDYBIN'])+'\n') continue else: continue f.write('--------------------------------------------------------------------------------------------'+'\n') f.write('--------------------------------------------------------------------------------------------'+'\n') f.write('ST-7 IMAGER ('+str(len(os.listdir(root+'/ST-7/i/')))+' frames)\n') f.write('--------------------------------------------------------------------------------------------'+'\n') f.write("{:28s} {:24s} {:15s} {:6s} {:5s} {:4s} {:4s}".format('FILENAME','DATE-OBS','IMAGETYP','FILTER','EXPT','XBIN','YBIN')+'\n') f.write('--------------------------------------------------------------------------------------------'+'\n') for filename in sorted(os.listdir(root+'/ST-7/i/')): if filename.endswith(".FIT") or filename.endswith(".fits"): hdu = pyfits.open(root+'/ST-7/i/'+filename) f.write("{:28s} {:24s} {:15s} {:6s} {:5.1f} {:4d} {:4d}".format(filename,hdu[0].header['DATE-OBS'],hdu[0].header['IMAGETYP'],hdu[0].header['FILTER'],hdu[0].header['EXPTIME'],hdu[0].header['CCDXBIN'],hdu[0].header['CCDYBIN'])+'\n') continue else: continue f.write('--------------------------------------------------------------------------------------------'+'\n') f.write('--------------------------------------------------------------------------------------------'+'\n') f.write('ST-7 GUIDER ('+str(len(os.listdir(root+'/ST-7/g/')))+' frames)\n') f.write('--------------------------------------------------------------------------------------------'+'\n') f.write("{:28s} {:24s} {:15s} {:6s} {:5s} {:4s} {:4s}".format('FILENAME','DATE-OBS','IMAGETYP','FILTER','EXPT','XBIN','YBIN')+'\n') f.write('--------------------------------------------------------------------------------------------'+'\n') 
for filename in sorted(os.listdir(root+'/ST-7/g/')): if filename.endswith(".FIT") or filename.endswith(".fits"): hdu = pyfits.open(root+'/ST-7/g/'+filename) f.write("{:28s} {:24s} {:15s} {:6s} {:5.1f} {:4d} {:4d}".format(filename,hdu[0].header['DATE-OBS'],hdu[0].header['IMAGETYP'],hdu[0].header['FILTER'],hdu[0].header['EXPTIME'],hdu[0].header['CCDXBIN'],hdu[0].header['CCDYBIN'])+'\n') continue else: continue f.write('--------------------------------------------------------------------------------------------'+'\n') f.close()
68.573529
238
0.39674
491
4,663
3.765784
0.138493
0.094105
0.1298
0.05192
0.916171
0.916171
0.903191
0.850189
0.833964
0.833964
0
0.034833
0.070341
4,663
67
239
69.597015
0.391696
0
0
0.677966
0
0.135593
0.552005
0.324255
0
0
0
0
0
1
0
false
0
0.050847
0
0.050847
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
e268b2fea4f9fc2df7a1076c5e7036d782221030
10,861
py
Python
models/rsrae.py
MPCAICDM/MPCA
c996435a0578ea4160f934bc01041c2ef23468f3
[ "MIT" ]
null
null
null
models/rsrae.py
MPCAICDM/MPCA
c996435a0578ea4160f934bc01041c2ef23468f3
[ "MIT" ]
null
null
null
models/rsrae.py
MPCAICDM/MPCA
c996435a0578ea4160f934bc01041c2ef23468f3
[ "MIT" ]
null
null
null
from functools import reduce from operator import mul from typing import Tuple import torch import torch.nn as nn from loss_functions.rsr_loss import RSRLoss class _Encoder(nn.Module): def __init__(self, in_channels, hidden_layer_sizes, activation, pad_mode): super(_Encoder, self).__init__() self.activation = activation self.hidden_layer_sizes = hidden_layer_sizes self.is_bn = True if self.is_bn: self.layers = nn.Sequential( nn.Conv2d(in_channels=in_channels, out_channels=hidden_layer_sizes[0], kernel_size=5, stride=2, padding=2), # 14*14 self.activation, nn.BatchNorm2d(num_features=hidden_layer_sizes[0]), nn.Conv2d(in_channels=hidden_layer_sizes[0], out_channels=hidden_layer_sizes[1], kernel_size=5, stride=2, padding=2), # 7 * 7 self.activation, nn.BatchNorm2d(num_features=hidden_layer_sizes[1]), nn.Conv2d(in_channels=hidden_layer_sizes[1], out_channels=hidden_layer_sizes[2], kernel_size=3, stride=2, padding=pad_mode), # 3 * 3 self.activation, nn.BatchNorm2d(num_features=hidden_layer_sizes[2]) ) else: self.layers = nn.Sequential( nn.Conv2d(in_channels=in_channels, out_channels=hidden_layer_sizes[0], kernel_size=5, stride=2, padding=2), # 14*14 self.activation, #nn.BatchNorm2d(num_features=hidden_layer_sizes[0]), nn.Conv2d(in_channels=hidden_layer_sizes[0], out_channels=hidden_layer_sizes[1], kernel_size=5, stride=2, padding=2), # 7 * 7 self.activation, #nn.BatchNorm2d(num_features=hidden_layer_sizes[1]), nn.Conv2d(in_channels=hidden_layer_sizes[1], out_channels=hidden_layer_sizes[2], kernel_size=3, stride=2, padding=pad_mode), # 3 * 3 self.activation, #nn.BatchNorm2d(num_features=hidden_layer_sizes[2]) ) def forward(self, x): return self.layers(x) class _Decoder(nn.Module): def __init__(self, in_channels,out_channels, h_shape, hidden_layer_sizes, activation, pad_mode): super(_Decoder, self).__init__() self.is_bn = True self.activation = activation self.hidden_layer_sizes = hidden_layer_sizes self.h_shape = h_shape h_channels = h_shape[0]*h_shape[1]*h_shape[2] 
self.fc = nn.Linear(in_features=in_channels, out_features=h_channels) self.bn = nn.BatchNorm2d(num_features=h_shape[0]) if self.is_bn: self.deconv1 = nn.Sequential( nn.ConvTranspose2d(in_channels=h_shape[0], out_channels=hidden_layer_sizes[1], kernel_size=3, stride=2, padding=pad_mode, output_padding=pad_mode), self.activation, nn.BatchNorm2d(num_features=hidden_layer_sizes[1]) ) self.deconv2 = nn.Sequential( nn.ConvTranspose2d(in_channels=hidden_layer_sizes[1], out_channels=hidden_layer_sizes[0], kernel_size=5, stride=2,padding=2, output_padding=1), self.activation, nn.BatchNorm2d(num_features=hidden_layer_sizes[0]) ) self.deconv3 = nn.Sequential( nn.ConvTranspose2d(in_channels=hidden_layer_sizes[0], out_channels=out_channels, kernel_size=5, stride=2, padding=2, output_padding=1), self.activation, # nn.Conv2d(in_channels=16, out_channels=out_channels, kernel_size=1, bias=False), # self.activation, ) else: self.deconv1 = nn.Sequential( nn.ConvTranspose2d(in_channels=h_shape[0], out_channels=hidden_layer_sizes[1], kernel_size=3, stride=2, padding=pad_mode, output_padding=pad_mode), self.activation, # nn.BatchNorm2d(num_features=hidden_layer_sizes[1]) ) self.deconv2 = nn.Sequential( nn.ConvTranspose2d(in_channels=hidden_layer_sizes[1], out_channels=hidden_layer_sizes[0], kernel_size=5, stride=2, padding=2, output_padding=1), self.activation, # nn.BatchNorm2d(num_features=hidden_layer_sizes[0]) ) self.deconv3 = nn.Sequential( nn.ConvTranspose2d(in_channels=hidden_layer_sizes[0], out_channels=out_channels, kernel_size=5, stride=2, padding=2, output_padding=1), self.activation, # nn.Conv2d(in_channels=16, out_channels=out_channels, kernel_size=1, bias=False), # self.activation, ) def forward(self, x): h = self.fc(x) h = self.activation(h) h = h.view(-1, *self.h_shape) h = self.bn(h) #print(h.shape) h = self.deconv1(h) h = self.deconv2(h) h = self.deconv3(h) return h def l2_norm(input, dim): norm = torch.norm(input, 2, dim, True) output = torch.div(input, norm) return 
output class _Decoder_Linear(nn.Module): def __init__(self, in_channels,out_channels, hidden_layer_sizes, activation, bn): super(_Decoder_Linear, self).__init__() self.activation = activation self.hidden_layer_sizes = hidden_layer_sizes if bn: self.deconv = nn.Sequential( nn.Linear(in_features=in_channels, out_features=hidden_layer_sizes[2]), # 14*14 self.activation, nn.BatchNorm1d(num_features=hidden_layer_sizes[2]), nn.Linear(in_features=hidden_layer_sizes[2], out_features=hidden_layer_sizes[1]), # 14*14 self.activation, nn.BatchNorm1d(num_features=hidden_layer_sizes[1]), nn.Linear(in_features=hidden_layer_sizes[1], out_features=hidden_layer_sizes[0]), # 14*14 self.activation, nn.BatchNorm1d(num_features=hidden_layer_sizes[0]), nn.Linear(in_features=hidden_layer_sizes[0], out_features=out_channels), # 14*14 self.activation, ) else: self.deconv = nn.Sequential( nn.Linear(in_features=in_channels, out_features=hidden_layer_sizes[2]), # 14*14 self.activation, nn.Linear(in_features=hidden_layer_sizes[2], out_features=hidden_layer_sizes[1]), # 14*14 self.activation, nn.Linear(in_features=hidden_layer_sizes[1], out_features=hidden_layer_sizes[0]), # 14*14 self.activation, nn.Linear(in_features=hidden_layer_sizes[0], out_features=out_channels), # 14*14 self.activation, ) print('nobn') def forward(self, x): h = self.deconv(x) return h class _Encoder_Linear(nn.Module): def __init__(self, in_channels, hidden_layer_sizes, activation, bn): super(_Encoder_Linear, self).__init__() self.activation = activation self.hidden_layer_sizes = hidden_layer_sizes if bn: self.layers = nn.Sequential( nn.Linear(in_features=in_channels, out_features=hidden_layer_sizes[0]), # 14*14 self.activation, nn.BatchNorm1d(num_features=hidden_layer_sizes[0]), nn.Linear(in_features=hidden_layer_sizes[0], out_features=hidden_layer_sizes[1]), # 14*14 self.activation, nn.BatchNorm1d(num_features=hidden_layer_sizes[1]), nn.Linear(in_features=hidden_layer_sizes[1], out_features=hidden_layer_sizes[2]), # 
14*14 self.activation, nn.BatchNorm1d(num_features=hidden_layer_sizes[2]), ) else: self.layers = nn.Sequential( nn.Linear(in_features=in_channels, out_features=hidden_layer_sizes[0]), # 14*14 self.activation, nn.Linear(in_features=hidden_layer_sizes[0], out_features=hidden_layer_sizes[1]), # 14*14 self.activation, nn.Linear(in_features=hidden_layer_sizes[1], out_features=hidden_layer_sizes[2]), # 14*14 self.activation, ) print('nobn') def forward(self, x): return self.layers(x) class RSRAE(nn.Module): def __init__(self, input_shape, hidden_layer_sizes, z_channels): super(RSRAE, self).__init__() h, w, c = input_shape pad_mode = 1 if h % 8 == 0 else 0 self.encoder = _Encoder(in_channels=c, hidden_layer_sizes=hidden_layer_sizes, activation=nn.Tanh(), pad_mode=pad_mode) h_shape = (hidden_layer_sizes[2], h//8, w//8) h_channels = h_shape[0]*h_shape[1]*h_shape[2] self.A = torch.nn.Parameter(torch.randn(h_channels, z_channels)) self.renorm = False self.decoder = _Decoder(in_channels=z_channels, out_channels=c, h_shape=h_shape,pad_mode=pad_mode, hidden_layer_sizes=hidden_layer_sizes,activation=nn.Tanh()) def forward(self, x): y = self.encoder(x) y = y.view(y.shape[0], -1) y_rsr = torch.matmul(y, self.A) if self.renorm: z = l2_norm(y_rsr, -1) else: z = y_rsr x_r = self.decoder(z) #print(y.shape, y_rsr.shape, z.shape, x_r.shape) return y, y_rsr, z, x_r class RSRAE_Linear(nn.Module): def __init__(self, input_shape, hidden_layer_sizes, z_channels, bn): super(RSRAE_Linear, self).__init__() c = input_shape self.encoder = _Encoder_Linear(in_channels=c, hidden_layer_sizes=hidden_layer_sizes, activation=nn.ReLU(),bn=bn) self.bn = bn self.A = torch.nn.Parameter(torch.randn(hidden_layer_sizes[2], z_channels)) self.renorm = False self.decoder = _Decoder_Linear(in_channels=z_channels, out_channels=c,bn=bn, hidden_layer_sizes=hidden_layer_sizes,activation=nn.ReLU()) def forward(self, x): y = self.encoder(x) y = y.view(y.shape[0], -1) y_rsr = torch.matmul(y, self.A) if self.renorm: z = 
l2_norm(y_rsr, -1) else: z = y_rsr x_r = self.decoder(z) #print(y.shape, y_rsr.shape, z.shape, x_r.shape) return y, y_rsr, z, x_r
40.984906
126
0.591198
1,362
10,861
4.408957
0.068282
0.146545
0.213156
0.151873
0.878768
0.871107
0.865612
0.830641
0.80816
0.767694
0
0.031611
0.306786
10,861
264
127
41.140152
0.765972
0.062333
0
0.63285
0
0
0.000788
0
0
0
0
0
0
1
0.062802
false
0
0.028986
0.009662
0.154589
0.009662
0
0
0
null
0
1
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
e28e8794f4f59a53d464046576a0bd9097e64abc
4,082
py
Python
src/triage/component/results_schema/alembic/versions/b097e47ba829_remove_random_seed_from_experiments.py
josephbajor/triage_NN
cbaee6e5a06e597c91fec372717d89a2b5f34fa5
[ "MIT" ]
160
2017-06-13T09:59:59.000Z
2022-03-21T22:00:35.000Z
src/triage/component/results_schema/alembic/versions/b097e47ba829_remove_random_seed_from_experiments.py
josephbajor/triage_NN
cbaee6e5a06e597c91fec372717d89a2b5f34fa5
[ "MIT" ]
803
2016-10-21T19:44:02.000Z
2022-03-29T00:02:33.000Z
src/triage/component/results_schema/alembic/versions/b097e47ba829_remove_random_seed_from_experiments.py
josephbajor/triage_NN
cbaee6e5a06e597c91fec372717d89a2b5f34fa5
[ "MIT" ]
59
2017-01-31T22:10:22.000Z
2022-03-19T12:35:03.000Z
"""empty message Revision ID: b097e47ba829 Revises: 45219f25072b Create Date: 2021-05-20 15:40:47.288721 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = 'b097e47ba829' down_revision = '45219f25072b' branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.alter_column('experiment_runs', 'matrices_errored', existing_type=sa.INTEGER(), nullable=True, schema='triage_metadata') op.alter_column('experiment_runs', 'matrices_made', existing_type=sa.INTEGER(), nullable=True, schema='triage_metadata') op.alter_column('experiment_runs', 'matrices_skipped', existing_type=sa.INTEGER(), nullable=True, schema='triage_metadata') op.alter_column('experiment_runs', 'models_errored', existing_type=sa.INTEGER(), nullable=True, schema='triage_metadata') op.alter_column('experiment_runs', 'models_made', existing_type=sa.INTEGER(), nullable=True, schema='triage_metadata') op.alter_column('experiment_runs', 'models_skipped', existing_type=sa.INTEGER(), nullable=True, schema='triage_metadata') op.drop_column('experiments', 'random_seed', schema='triage_metadata') op.create_index(op.f('ix_triage_metadata_matrices_matrix_uuid'), 'matrices', ['matrix_uuid'], unique=True, schema='triage_metadata') op.drop_index('ix_model_metadata_matrices_matrix_uuid', table_name='matrices', schema='triage_metadata') op.create_index(op.f('ix_triage_metadata_models_model_hash'), 'models', ['model_hash'], unique=True, schema='triage_metadata') op.drop_index('ix_model_metadata_models_model_hash', table_name='models', schema='triage_metadata') op.create_foreign_key('models_built_in_experiment_run_fkey', 'models', 'experiment_runs', ['built_in_experiment_run'], ['id'], source_schema='triage_metadata', referent_schema='triage_metadata') # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### op.drop_constraint('models_built_in_experiment_run_fkey', 'models', schema='triage_metadata', type_='foreignkey') op.create_index('ix_model_metadata_models_model_hash', 'models', ['model_hash'], unique=True, schema='triage_metadata') op.drop_index(op.f('ix_triage_metadata_models_model_hash'), table_name='models', schema='triage_metadata') op.create_index('ix_model_metadata_matrices_matrix_uuid', 'matrices', ['matrix_uuid'], unique=True, schema='triage_metadata') op.drop_index(op.f('ix_triage_metadata_matrices_matrix_uuid'), table_name='matrices', schema='triage_metadata') op.add_column('experiments', sa.Column('random_seed', sa.INTEGER(), autoincrement=False, nullable=True), schema='triage_metadata') op.alter_column('experiment_runs', 'models_skipped', existing_type=sa.INTEGER(), nullable=False, schema='triage_metadata') op.alter_column('experiment_runs', 'models_made', existing_type=sa.INTEGER(), nullable=False, schema='triage_metadata') op.alter_column('experiment_runs', 'models_errored', existing_type=sa.INTEGER(), nullable=False, schema='triage_metadata') op.alter_column('experiment_runs', 'matrices_skipped', existing_type=sa.INTEGER(), nullable=False, schema='triage_metadata') op.alter_column('experiment_runs', 'matrices_made', existing_type=sa.INTEGER(), nullable=False, schema='triage_metadata') op.alter_column('experiment_runs', 'matrices_errored', existing_type=sa.INTEGER(), nullable=False, schema='triage_metadata') # ### end Alembic commands ###
46.91954
198
0.667075
459
4,082
5.592593
0.185185
0.158161
0.19478
0.179977
0.817296
0.817296
0.790806
0.753409
0.719907
0.719907
0
0.017316
0.207741
4,082
86
199
47.465116
0.776438
0.072269
0
0.705882
0
0
0.3512
0.103733
0
0
0
0
0
1
0.029412
false
0
0.029412
0
0.058824
0
0
0
0
null
0
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
e2bdba86f4d42714e2bc6a1aa03b813777340dbf
759
py
Python
miamm/menus/permissions.py
kimond/miamm
b351ea22c1d48e1ff7012099dda1474e4658a617
[ "BSD-3-Clause" ]
2
2015-01-27T15:03:58.000Z
2015-01-27T16:29:56.000Z
miamm/menus/permissions.py
kimond/miamm
b351ea22c1d48e1ff7012099dda1474e4658a617
[ "BSD-3-Clause" ]
null
null
null
miamm/menus/permissions.py
kimond/miamm
b351ea22c1d48e1ff7012099dda1474e4658a617
[ "BSD-3-Clause" ]
null
null
null
from rest_framework import permissions class IsOwnerOrMenuUser(permissions.BasePermission): """ Object-level permission to only allow owners of an object to access it. Assumes the model instance has an `owner` attribute. """ def has_object_permission(self, request, view, obj): # Instance must have an attribute named `owner`. return obj.owner == request.user class IsMenuOwner(permissions.BasePermission): """ Object-level permission to only allow owners of an object to access it. Assumes the model instance has an `owner` attribute. """ def has_object_permission(self, request, view, obj): # Instance must have an attribute named `owner`. return obj.menu.owner == request.user
30.36
75
0.704875
96
759
5.520833
0.395833
0.09434
0.116981
0.135849
0.796226
0.796226
0.796226
0.796226
0.796226
0.796226
0
0
0.217391
759
24
76
31.625
0.892256
0.453228
0
0.285714
0
0
0
0
0
0
0
0
0
1
0.285714
false
0
0.142857
0.285714
1
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
8
e2f684b5def03f96162838cfbb6fe0d00601f5c4
3,808
py
Python
examples/clingo/csp/csp/tests/test_fs.py
wanko/clingo
d0c434c977ebb742d8a486166bb64bfce5b5fc4b
[ "MIT" ]
null
null
null
examples/clingo/csp/csp/tests/test_fs.py
wanko/clingo
d0c434c977ebb742d8a486166bb64bfce5b5fc4b
[ "MIT" ]
null
null
null
examples/clingo/csp/csp/tests/test_fs.py
wanko/clingo
d0c434c977ebb742d8a486166bb64bfce5b5fc4b
[ "MIT" ]
null
null
null
""" Basic tests checking the whole system. """ import unittest from csp.tests import solve FSI = """\ machine(1). machine(2). task(a). duration(a,1,3). duration(a,2,4). task(b). duration(b,1,1). duration(b,2,6). task(c). duration(c,1,5). duration(c,2,5). """ FSE = """\ 1 { cycle(T,U) : task(U), U != T } 1 :- task(T). 1 { cycle(T,U) : task(T), U != T } 1 :- task(U). reach(M) :- M = #min { T : task(T) }. reach(U) :- reach(T), cycle(T,U). :- task(T), not reach(T). 1 { start(T) : task(T) } 1. permutation(T,U) :- cycle(T,U), not start(U). seq((T,M),(T,M+1),D) :- task(T), duration(T,M,D), machine(M+1). seq((T1,M),(T2,M),D) :- permutation(T1,T2), duration(T1,M,D). &sum { 1*T1 + -1*T2 } <= -D :- seq(T1,T2,D). &sum { -1*(T,M) } <= 0 :- duration(T,M,D). &sum { 1*(T,M) } <= B :- duration(T,M,D), B=bound-D. #show permutation/2. """ FSD = """\ 1 { cycle(T,U) : task(U), U != T } 1 :- task(T). 1 { cycle(T,U) : task(T), U != T } 1 :- task(U). reach(M) :- M = #min { T : task(T) }. reach(U) :- reach(T), cycle(T,U). :- task(T), not reach(T). 1 { start(T) : task(T) } 1. permutation(T,U) :- cycle(T,U), not start(U). seq((T,M),(T,M+1),D) :- task(T), duration(T,M,D), machine(M+1). seq((T1,M),(T2,M),D) :- permutation(T1,T2), duration(T1,M,D). &diff { T1-T2 } <= -D :- seq(T1,T2,D). &diff { 0-(T,M) } <= 0 :- duration(T,M,D). &sum { (T,M)-0 } <= B :- duration(T,M,D), B=bound-D. #show permutation/2. 
""" SOL11 = [ ['permutation(a,c)', 'permutation(b,a)', ('(a,1)', 1), ('(a,2)', 7), ('(b,1)', 0), ('(b,2)', 1), ('(c,1)', 4), ('(c,2)', 11)], ['permutation(a,c)', 'permutation(b,a)', ('(a,1)', 1), ('(a,2)', 7), ('(b,1)', 0), ('(b,2)', 1), ('(c,1)', 5), ('(c,2)', 11)], ['permutation(a,c)', 'permutation(b,a)', ('(a,1)', 1), ('(a,2)', 7), ('(b,1)', 0), ('(b,2)', 1), ('(c,1)', 6), ('(c,2)', 11)], ['permutation(a,c)', 'permutation(b,a)', ('(a,1)', 2), ('(a,2)', 7), ('(b,1)', 0), ('(b,2)', 1), ('(c,1)', 5), ('(c,2)', 11)], ['permutation(a,c)', 'permutation(b,a)', ('(a,1)', 2), ('(a,2)', 7), ('(b,1)', 0), ('(b,2)', 1), ('(c,1)', 6), ('(c,2)', 11)], ['permutation(a,c)', 'permutation(b,a)', ('(a,1)', 3), ('(a,2)', 7), ('(b,1)', 0), ('(b,2)', 1), ('(c,1)', 6), ('(c,2)', 11)]] SOL16 = SOL11 + [ ['permutation(b,c)', 'permutation(c,a)', ('(a,1)', 6), ('(a,2)', 12), ('(b,1)', 0), ('(b,2)', 1), ('(c,1)', 1), ('(c,2)', 7)], ['permutation(b,c)', 'permutation(c,a)', ('(a,1)', 7), ('(a,2)', 12), ('(b,1)', 0), ('(b,2)', 1), ('(c,1)', 1), ('(c,2)', 7)], ['permutation(b,c)', 'permutation(c,a)', ('(a,1)', 7), ('(a,2)', 12), ('(b,1)', 0), ('(b,2)', 1), ('(c,1)', 2), ('(c,2)', 7)], ['permutation(b,c)', 'permutation(c,a)', ('(a,1)', 8), ('(a,2)', 12), ('(b,1)', 0), ('(b,2)', 1), ('(c,1)', 1), ('(c,2)', 7)], ['permutation(b,c)', 'permutation(c,a)', ('(a,1)', 8), ('(a,2)', 12), ('(b,1)', 0), ('(b,2)', 1), ('(c,1)', 2), ('(c,2)', 7)], ['permutation(b,c)', 'permutation(c,a)', ('(a,1)', 9), ('(a,2)', 12), ('(b,1)', 0), ('(b,2)', 1), ('(c,1)', 1), ('(c,2)', 7)], ['permutation(b,c)', 'permutation(c,a)', ('(a,1)', 9), ('(a,2)', 12), ('(b,1)', 0), ('(b,2)', 1), ('(c,1)', 2), ('(c,2)', 7)]] # pylint: disable=missing-docstring class TestMain(unittest.TestCase): def test_fse(self): self.assertEqual(solve(FSE + FSI, maxint=10, options=["-c", "bound=16"]), []) self.assertEqual(solve(FSE + FSI, maxint=11, options=["-c", "bound=16"]), SOL11) self.assertEqual(solve(FSE + FSI, options=["-c", "bound=16"]), 
SOL16) def test_fsd(self): self.assertEqual(solve(FSD + FSI, maxint=10, options=["-c", "bound=16"]), []) self.assertEqual(solve(FSD + FSI, maxint=11, options=["-c", "bound=16"]), SOL11) self.assertEqual(solve(FSD + FSI, options=["-c", "bound=16"]), SOL16)
42.786517
130
0.452468
683
3,808
2.519766
0.096633
0.019756
0.022661
0.030215
0.825683
0.818129
0.748402
0.748402
0.712377
0.712377
0
0.081844
0.162553
3,808
88
131
43.272727
0.457824
0.01917
0
0.370968
0
0.209677
0.584384
0
0
0
0
0
0.096774
1
0.032258
false
0
0.032258
0
0.080645
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
1
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
391fbed68cba95e3034b6c7c5f4871ed7a4a4a26
635
py
Python
Math/829. Consecutive Numbers Sum.py
beckswu/Leetcode
480e8dc276b1f65961166d66efa5497d7ff0bdfd
[ "MIT" ]
138
2020-02-08T05:25:26.000Z
2021-11-04T11:59:28.000Z
Math/829. Consecutive Numbers Sum.py
beckswu/Leetcode
480e8dc276b1f65961166d66efa5497d7ff0bdfd
[ "MIT" ]
null
null
null
Math/829. Consecutive Numbers Sum.py
beckswu/Leetcode
480e8dc276b1f65961166d66efa5497d7ff0bdfd
[ "MIT" ]
24
2021-01-02T07:18:43.000Z
2022-03-20T08:17:54.000Z
class Solution: def consecutiveNumbersSum(self, N: int) -> int: count = 0 # x > 0 --> N/k - (k + 1)/2 > 0 upper_limit = ceil((2 * N + 0.25)**0.5 - 0.5) + 1 for k in range(1, upper_limit): # x should be integer if (N - k * (k + 1) // 2) % k == 0: count += 1 return count class Solution: def consecutiveNumbersSum(self, N: int) -> int: count = 0 upper_limit = ceil((2 * N + 0.25)**0.5 - 0.5) + 1 for k in range(1, upper_limit): N -= k if N % k == 0: count += 1 return count
27.608696
57
0.437795
92
635
2.978261
0.282609
0.029197
0.116788
0.270073
0.908759
0.872263
0.733577
0.733577
0.733577
0.733577
0
0.086957
0.420472
635
23
58
27.608696
0.657609
0.077165
0
0.823529
0
0
0
0
0
0
0
0
0
1
0.117647
false
0
0
0
0.352941
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
3929389259543b132899ae51dc550238c09f5a4e
17,518
py
Python
mimecast/komand_mimecast/util/util.py
xhennessy-r7/insightconnect-plugins
59268051313d67735b5dd3a30222eccb92aca8e9
[ "MIT" ]
null
null
null
mimecast/komand_mimecast/util/util.py
xhennessy-r7/insightconnect-plugins
59268051313d67735b5dd3a30222eccb92aca8e9
[ "MIT" ]
null
null
null
mimecast/komand_mimecast/util/util.py
xhennessy-r7/insightconnect-plugins
59268051313d67735b5dd3a30222eccb92aca8e9
[ "MIT" ]
null
null
null
from komand_mimecast.util.log_helper import LogHelper import json import base64 import hashlib import hmac import uuid import datetime import requests # For mapping params{} to the values needed for request payloads def normalize(key: str, value: str) -> dict: if '_' not in key: if value != '' and value != 'none': return {key: value} return {} chunks = list(filter(lambda c: len(c), key.split("_"))) for i in range(1, len(chunks)): chunks[i] = chunks[i].capitalize() if value != '' and value != 'none': return {''.join(chunks): value} return {} class Authentication: # URI's for login and logout _URI_LOGIN = '/api/login/login' _URI_LOGOUT = '/api/login/logout' def __init__(self, logger=None): if logger: self.logger = logger else: self.logger = LogHelper().logger def login(self, url: str, username: str, password: str, auth_type: str, app_id: str) -> dict: """ This method is used to create the initial connection and return an access and secret key for further api calls :param url: The server URL :param username: Username to login with :param password: Users password :param auth_type: The type of authentication: cloud or domain :param app_id: The application ID for the app that will be logging in :return: an access_key and a secret_key """ # Set full URL url = url + self._URI_LOGIN # Base64 encode credentials credentials = username + ':' + password credentials = base64.b64encode(credentials.encode()) credentials = credentials.decode("UTF-8") # Generate request header values request_id = str(uuid.uuid4()) hdr_date = datetime.datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S") + " UTC" # Create request headers headers = { 'Authorization': auth_type + ' ' + credentials, 'x-mc-app-id': app_id, 'x-mc-date': hdr_date, 'x-mc-req-id': request_id, 'Content-Type': 'application/json' } payload = { "data": [ { "userName": username } ] } try: request = requests.post(url=url, headers=headers, data=str(payload)) except requests.exceptions.RequestException as e: raise Exception(e) if 
request.status_code in range(300, 399): raise Exception(f'Redirect error. The Mimecast server responded with a {request.status_code} code.' f' Check the Mimecast URL and contact support if this issue persists') if request.status_code in range(400, 499): self.logger.error(request.text) raise Exception(f'Bad request error. The Mimecast server responded with a {request.status_code} code.' f' Check the username and password, as well as the Mimecast app ID and key.') try: response = request.json() except json.decoder.JSONDecodeError: self.logger.error(request.text) raise Exception( 'Unknown error. The Mimecast server did not respond correctly. Response not in JSON format. Response in logs') try: # Catch errors returned by Mimecast api if response['fail']: self.logger.error(response['fail']) raise Exception( 'Failed to authenticate. Please double-check the credentials in the Connection.' ' If the issue persists, please contact support.' ' Status code is {code},' ' Mimecast error message: {message}. See log for more details'.format(code=response['meta']['status'], message=response['fail'][0]['message'])) except KeyError: self.logger.error(response) raise Exception( 'Unknown error. The Mimecast server did not respond correctly. See the response in logs') try: access_key = response['data'][0]['accessKey'] except KeyError: self.logger.error(response) raise Exception('Error: Unable to authenticate to the Mimecast API.' ' Please double-check the credentials in the Connection.' ' If the issue persists, please contact support. See log for more details') try: secret_key = response['data'][0]['secretKey'] self.logger.error(response) except KeyError: raise Exception('Error: Unable to authenticate to the Mimecast API.' ' Please double-check the credentials in the Connection.' ' If the issue persists, please contact support. 
See log for more details') return {'access_key': access_key, 'secret_key': secret_key} def refresh_key(self, url: str, username: str, password: str, access_key: str, app_id: str, auth_type: str)-> dict: """ This method is used to refresh a connection if the connection has timed out. :param url: The server URL :param username: Username to login with :param password: Users password :param access_key: The access_key for the connection that needs to be refreshed :param auth_type: The type of authentication: cloud or domain :param app_id: The application ID for the app that will be logging in :param auth_type: The auth type used :return: A dictionary that contains information about the connection refresh """ # Set full URL url = url + self._URI_LOGIN # Generate request header values request_id = str(uuid.uuid4()) hdr_date = datetime.datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S") + " UTC" # Base64 encode credentials credentials = username + ':' + password credentials = base64.b64encode(credentials.encode()) credentials = credentials.decode("UTF-8") # Create request headers headers = { 'Authorization': auth_type + ' ' + credentials, 'x-mc-app-id': app_id, 'x-mc-date': hdr_date, 'x-mc-req-id': request_id, 'Content-Type': 'application/json' } payload = { "data": [ { "userName": username, "accessKey": access_key } ] } try: request = requests.post(url=url, headers=headers, data=str(payload)) except requests.exceptions.RequestException as e: raise Exception(e) try: response = request.json() except json.decoder.JSONDecodeError: self.logger.error(request.text) raise Exception( 'Unknown error. The Mimecast server did not respond correctly. Response not in JSON format. Response in logs') try: # Catch errors returned by Mimecast api if response['fail']: self.logger.error(response['fail']) raise Exception( 'Failed to refresh key. 
Status code is {}, see log for details'.format( response['meta']['status'])) except KeyError: self.logger.error(response) raise Exception( 'Unknown error. Mimecast server did not respond correctly. Response in logs') return response def logout(self, url: str, username: str, password: str, auth_type: str, access_key: str, secret_key: str, app_id: str, app_key: str, refresh=False)-> dict: """ This method removes the binding of an access and secret key to a username. This method should always be called when an action is finished, as too many keys bound to a user will lockout the user :param url: The server URL :param username: Username to login with :param password: Users password :param auth_type: The type of authentication: cloud or domain :param access_key: The access key to unbind :param secret_key: The secret key to unbind :param app_id: The application ID for the app that will be logging in :param app_key: The key associated with the app_id :param refresh: Used to stop possible infinite loop with key refresh recursive function :return: Dictionary with information about logout """ # Set full URL url = url + self._URI_LOGOUT # Generate request header values request_id = str(uuid.uuid4()) hdr_date = datetime.datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S") + " UTC" # Decode secret key encoded_secret_key = secret_key.encode() bytes_secret_key = base64.b64decode(encoded_secret_key) # Create hmac message msg = ':'.join([hdr_date, request_id, self._URI_LOGOUT, app_key]) # Create the HMAC SHA1 of the Base64 decoded secret key for the Authorization header hmac_sha1 = hmac.new(bytes_secret_key, msg.encode(), digestmod=hashlib.sha1).digest() # Use the HMAC SHA1 value to sign the hdrDate + ":" requestId + ":" + URI + ":" + appkey sig = base64.encodebytes(hmac_sha1).rstrip() sig = sig.decode('UTF-8') # Create request headers headers = { 'Authorization': 'MC ' + access_key + ':' + sig, 'x-mc-app-id': app_id, 'x-mc-date': hdr_date, 'x-mc-req-id': request_id, 'Content-Type': 
'application/json' } payload = { "data": [ { "accessKey": access_key } ] } # Logout try: request = requests.post(url=url, headers=headers, data=str(payload)) except requests.exceptions.RequestException as e: raise Exception(e) try: response = request.json() except json.decoder.JSONDecodeError: self.logger.error(request.text) raise Exception( 'Unknown error. The Mimecast server did not respond correctly. Response not in JSON format. Response in logs') # Check for expired key try: if response['fail']: for errors in response['fail']: for codes in errors['errors']: if codes['code'] == 'err_xdk_binding_expired': self.logger.info('Key expired. Refreshing') # Try to refresh key self.refresh_key(url, username, password, access_key, app_id, auth_type) # Rerun logout with refreshed key if not refresh: self.logout(url, username, password, auth_type, access_key, secret_key, app_id, access_key, True) else: self.logger.error(response) raise Exception('Something went wrong. Contact support') except KeyError: self.logger.error(response) raise Exception( 'Unknown error. The Mimecast server did not respond correctly. 
Response in logs') return response class MimecastRequests: def __init__(self, logger=None): if logger: self.logger = logger else: self.logger = LogHelper().logger def mimecast_post(self, url: str, uri: str, username: str, password: str, auth_type: str, access_key: str, secret_key: str, app_id: str, app_key: str, data: dict, refresh=False)-> dict: """ This method will send a properly formatted post request to the Mimecast server :param url: The server URL :param uri: The URI for the api call :param username: Username to login with :param password: Users password :param auth_type: The type of authentication: cloud or domain :param access_key: The access key for the session :param secret_key: The secret key for the session :param app_id: The application ID for the app that will be logging in :param app_key: The key associated with the app_id :param data: The payload for the api call :param refresh: Used to stop possible infinite loop with key refresh recursive function :return: """ # Set full URL url = url + uri # Generate request header values request_id = str(uuid.uuid4()) hdr_date = datetime.datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S") + " UTC" # Decode secret key encoded_secret_key = secret_key.encode() bytes_secret_key = base64.b64decode(encoded_secret_key) # Create hmac message msg = ':'.join([hdr_date, request_id, uri, app_key]) # Create the HMAC SHA1 of the Base64 decoded secret key for the Authorization header hmac_sha1 = hmac.new(bytes_secret_key, msg.encode(), digestmod=hashlib.sha1).digest() # Use the HMAC SHA1 value to sign the hdrDate + ":" requestId + ":" + URI + ":" + appkey sig = base64.encodebytes(hmac_sha1).rstrip() sig = sig.decode('UTF-8') # Create request headers headers = { 'Authorization': 'MC ' + access_key + ':' + sig, 'x-mc-app-id': app_id, 'x-mc-date': hdr_date, 'x-mc-req-id': request_id, 'Content-Type': 'application/json' } # build payload data if data is not None: payload = { 'data': [ data ] } else: payload = { 'data': [ ] } try: 
request = requests.post(url=url, headers=headers, data=str(payload)) except requests.exceptions.RequestException as e: logout = Authentication() logout.logout(url, username, password, auth_type, access_key, secret_key, app_id, access_key) raise Exception(e) try: response = request.json() except json.decoder.JSONDecodeError: self.logger.error(request.text) logout = Authentication() logout.logout(url, username, password, auth_type, access_key, secret_key, app_id, access_key) raise Exception( 'Unknown error. The Mimecast server did not respond correctly. Response not in JSON format. Response in logs') try: # Check for expired key if response['fail']: for errors in response['fail']: for codes in errors['errors']: if codes['code'] == 'err_xdk_binding_expired': self.logger.info('Key expired. Refreshing') # Try to refresh key refresh_key = Authentication() refresh_key.refresh_key(url, username, password, access_key, app_id, auth_type) # Resend request after key refresh if not refresh: response = self.mimecast_post(url, uri, access_key, secret_key, app_id, app_key, data, True) except KeyError: self.logger.error(response) logout = Authentication() logout.logout(url, username, password, auth_type, access_key, secret_key, app_id, access_key) raise Exception( 'Unknown error. The Mimecast server did not respond correctly. Response in logs') try: if response['fail']: self.logger.error(response['fail']) logout = Authentication() logout.logout(url, username, password, auth_type, access_key, secret_key, app_id, access_key) raise Exception( 'Server request failed. Status code is {}, see log for details'.format( response['meta']['status'])) except KeyError: self.logger.error(response) logout = Authentication() logout.logout(url, username, password, auth_type, access_key, secret_key, app_id, access_key) raise Exception( 'Unknown error. The Mimecast server did not respond correctly. Response in logs') return response
40.364055
130
0.54875
1,920
17,518
4.914583
0.128125
0.031475
0.025435
0.026812
0.804472
0.784549
0.75922
0.743853
0.726155
0.726155
0
0.005774
0.367222
17,518
433
131
40.457275
0.845467
0.188264
0
0.71777
0
0
0.18887
0.006385
0
0
0
0
0
1
0.02439
false
0.052265
0.027875
0
0.094077
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
8
1a2fbb3f9d4cbb78cfc718b436a869fb005a7f82
37
py
Python
stubs/traceback.py
waltermoreira/adama-app
96a795992f64e21a48762fe6902ef22775d086a9
[ "MIT" ]
null
null
null
stubs/traceback.py
waltermoreira/adama-app
96a795992f64e21a48762fe6902ef22775d086a9
[ "MIT" ]
1
2021-06-01T21:43:58.000Z
2021-06-01T21:43:58.000Z
stubs/traceback.py
waltermoreira/adama-app
96a795992f64e21a48762fe6902ef22775d086a9
[ "MIT" ]
null
null
null
import typing def format_exc(): pass
12.333333
22
0.783784
6
37
4.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.135135
37
3
22
12.333333
0.875
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
true
0.5
0.5
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
1
1
0
1
0
0
8
1a4b4162b5d44ddfa8cbdaf34b3177735085dc69
182
py
Python
src/util/init_random_seeds.py
YorkUCVIL/Wavelet-Flow
8d6d63fa116ec44299c32f37e66817594510f644
[ "MIT" ]
59
2020-10-28T03:09:05.000Z
2022-01-29T22:10:04.000Z
src/util/init_random_seeds.py
YorkUCVIL/Wavelet-Flow
8d6d63fa116ec44299c32f37e66817594510f644
[ "MIT" ]
4
2020-12-24T11:00:40.000Z
2021-05-22T06:14:27.000Z
src/util/init_random_seeds.py
YorkUCVIL/Wavelet-Flow
8d6d63fa116ec44299c32f37e66817594510f644
[ "MIT" ]
2
2020-10-29T01:15:03.000Z
2021-04-20T11:55:51.000Z
import tensorflow as tf import numpy as np from util import * def init_random_seeds(): tf.set_random_seed(config.random_seed.tensorflow) np.random.seed(config.random_seed.numpy)
20.222222
50
0.807692
30
182
4.7
0.5
0.283688
0.22695
0.312057
0.368794
0
0
0
0
0
0
0
0.10989
182
8
51
22.75
0.87037
0
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
true
0
0.5
0
0.666667
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
1a52da598aba93e174058e15f4fe99f95364b27b
119
py
Python
3d_fitting/combine_data.py
duguqiankun/NeuralVoicePuppetry
26b87d98a1ecfe6e4a6738641e6436ab1a9ece31
[ "BSD-3-Clause" ]
null
null
null
3d_fitting/combine_data.py
duguqiankun/NeuralVoicePuppetry
26b87d98a1ecfe6e4a6738641e6436ab1a9ece31
[ "BSD-3-Clause" ]
null
null
null
3d_fitting/combine_data.py
duguqiankun/NeuralVoicePuppetry
26b87d98a1ecfe6e4a6738641e6436ab1a9ece31
[ "BSD-3-Clause" ]
1
2021-12-21T08:20:34.000Z
2021-12-21T08:20:34.000Z
import os src = 'media/frame_avata_adam_gt_fit/train' dst = 'media/frame_avata_adam_gt_fit_cyaudio/train' number =
14.875
51
0.789916
20
119
4.25
0.65
0.235294
0.352941
0.447059
0.564706
0.564706
0
0
0
0
0
0
0.117647
119
8
52
14.875
0.809524
0
0
0
0
0
0.65
0.65
0
0
0
0
0
0
null
null
0
0.25
null
null
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
1
0
0
0
0
0
0
0
0
8
1a9226b9d379380f9e9c7ec2389dee7fb49d1868
60,415
py
Python
sdk/python/pulumi_oci/database/autonomous_exadata_infrastructure.py
EladGabay/pulumi-oci
6841e27d4a1a7e15c672306b769912efbfd3ba99
[ "ECL-2.0", "Apache-2.0" ]
5
2021-08-17T11:14:46.000Z
2021-12-31T02:07:03.000Z
sdk/python/pulumi_oci/database/autonomous_exadata_infrastructure.py
pulumi-oci/pulumi-oci
6841e27d4a1a7e15c672306b769912efbfd3ba99
[ "ECL-2.0", "Apache-2.0" ]
1
2021-09-06T11:21:29.000Z
2021-09-06T11:21:29.000Z
sdk/python/pulumi_oci/database/autonomous_exadata_infrastructure.py
pulumi-oci/pulumi-oci
6841e27d4a1a7e15c672306b769912efbfd3ba99
[ "ECL-2.0", "Apache-2.0" ]
2
2021-08-24T23:31:30.000Z
2022-01-02T19:26:54.000Z
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities from . import outputs from ._inputs import * __all__ = ['AutonomousExadataInfrastructureArgs', 'AutonomousExadataInfrastructure'] @pulumi.input_type class AutonomousExadataInfrastructureArgs: def __init__(__self__, *, availability_domain: pulumi.Input[str], compartment_id: pulumi.Input[str], shape: pulumi.Input[str], subnet_id: pulumi.Input[str], create_async: Optional[pulumi.Input[bool]] = None, defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None, display_name: Optional[pulumi.Input[str]] = None, domain: Optional[pulumi.Input[str]] = None, freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None, license_model: Optional[pulumi.Input[str]] = None, maintenance_window_details: Optional[pulumi.Input['AutonomousExadataInfrastructureMaintenanceWindowDetailsArgs']] = None, nsg_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None): """ The set of arguments for constructing a AutonomousExadataInfrastructure resource. :param pulumi.Input[str] availability_domain: The availability domain where the Autonomous Exadata Infrastructure is located. :param pulumi.Input[str] compartment_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment the Autonomous Exadata Infrastructure belongs in. :param pulumi.Input[str] shape: The shape of the Autonomous Exadata Infrastructure. The shape determines resources allocated to the Autonomous Exadata Infrastructure (CPU cores, memory and storage). To get a list of shapes, use the ListDbSystemShapes operation. 
:param pulumi.Input[str] subnet_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the subnet the Autonomous Exadata Infrastructure is associated with. :param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). :param pulumi.Input[str] display_name: (Updatable) The user-friendly name for the Autonomous Exadata Infrastructure. It does not have to be unique. :param pulumi.Input[str] domain: A domain name used for the Autonomous Exadata Infrastructure. If the Oracle-provided Internet and VCN Resolver is enabled for the specified subnet, the domain name for the subnet is used (don't provide one). Otherwise, provide a valid DNS domain name. Hyphens (-) are not permitted. :param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}` :param pulumi.Input[str] license_model: The Oracle license model that applies to all the databases in the Autonomous Exadata Infrastructure. The default is BRING_YOUR_OWN_LICENSE. :param pulumi.Input['AutonomousExadataInfrastructureMaintenanceWindowDetailsArgs'] maintenance_window_details: (Updatable) The scheduling details for the quarterly maintenance window. Patching and system updates take place during the maintenance window. :param pulumi.Input[Sequence[pulumi.Input[str]]] nsg_ids: (Updatable) A list of the [OCIDs](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the network security groups (NSGs) that this resource belongs to. 
Setting this to an empty array after the list is created removes the resource from all NSGs. For more information about NSGs, see [Security Rules](https://docs.cloud.oracle.com/iaas/Content/Network/Concepts/securityrules.htm). **NsgIds restrictions:** * Autonomous Databases with private access require at least 1 Network Security Group (NSG). The nsgIds array cannot be empty. """ pulumi.set(__self__, "availability_domain", availability_domain) pulumi.set(__self__, "compartment_id", compartment_id) pulumi.set(__self__, "shape", shape) pulumi.set(__self__, "subnet_id", subnet_id) if create_async is not None: pulumi.set(__self__, "create_async", create_async) if defined_tags is not None: pulumi.set(__self__, "defined_tags", defined_tags) if display_name is not None: pulumi.set(__self__, "display_name", display_name) if domain is not None: pulumi.set(__self__, "domain", domain) if freeform_tags is not None: pulumi.set(__self__, "freeform_tags", freeform_tags) if license_model is not None: pulumi.set(__self__, "license_model", license_model) if maintenance_window_details is not None: pulumi.set(__self__, "maintenance_window_details", maintenance_window_details) if nsg_ids is not None: pulumi.set(__self__, "nsg_ids", nsg_ids) @property @pulumi.getter(name="availabilityDomain") def availability_domain(self) -> pulumi.Input[str]: """ The availability domain where the Autonomous Exadata Infrastructure is located. """ return pulumi.get(self, "availability_domain") @availability_domain.setter def availability_domain(self, value: pulumi.Input[str]): pulumi.set(self, "availability_domain", value) @property @pulumi.getter(name="compartmentId") def compartment_id(self) -> pulumi.Input[str]: """ (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment the Autonomous Exadata Infrastructure belongs in. 
""" return pulumi.get(self, "compartment_id") @compartment_id.setter def compartment_id(self, value: pulumi.Input[str]): pulumi.set(self, "compartment_id", value) @property @pulumi.getter def shape(self) -> pulumi.Input[str]: """ The shape of the Autonomous Exadata Infrastructure. The shape determines resources allocated to the Autonomous Exadata Infrastructure (CPU cores, memory and storage). To get a list of shapes, use the ListDbSystemShapes operation. """ return pulumi.get(self, "shape") @shape.setter def shape(self, value: pulumi.Input[str]): pulumi.set(self, "shape", value) @property @pulumi.getter(name="subnetId") def subnet_id(self) -> pulumi.Input[str]: """ The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the subnet the Autonomous Exadata Infrastructure is associated with. """ return pulumi.get(self, "subnet_id") @subnet_id.setter def subnet_id(self, value: pulumi.Input[str]): pulumi.set(self, "subnet_id", value) @property @pulumi.getter(name="createAsync") def create_async(self) -> Optional[pulumi.Input[bool]]: return pulumi.get(self, "create_async") @create_async.setter def create_async(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "create_async", value) @property @pulumi.getter(name="definedTags") def defined_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]: """ (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). """ return pulumi.get(self, "defined_tags") @defined_tags.setter def defined_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]): pulumi.set(self, "defined_tags", value) @property @pulumi.getter(name="displayName") def display_name(self) -> Optional[pulumi.Input[str]]: """ (Updatable) The user-friendly name for the Autonomous Exadata Infrastructure. It does not have to be unique. 
""" return pulumi.get(self, "display_name") @display_name.setter def display_name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "display_name", value) @property @pulumi.getter def domain(self) -> Optional[pulumi.Input[str]]: """ A domain name used for the Autonomous Exadata Infrastructure. If the Oracle-provided Internet and VCN Resolver is enabled for the specified subnet, the domain name for the subnet is used (don't provide one). Otherwise, provide a valid DNS domain name. Hyphens (-) are not permitted. """ return pulumi.get(self, "domain") @domain.setter def domain(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "domain", value) @property @pulumi.getter(name="freeformTags") def freeform_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]: """ (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}` """ return pulumi.get(self, "freeform_tags") @freeform_tags.setter def freeform_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]): pulumi.set(self, "freeform_tags", value) @property @pulumi.getter(name="licenseModel") def license_model(self) -> Optional[pulumi.Input[str]]: """ The Oracle license model that applies to all the databases in the Autonomous Exadata Infrastructure. The default is BRING_YOUR_OWN_LICENSE. """ return pulumi.get(self, "license_model") @license_model.setter def license_model(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "license_model", value) @property @pulumi.getter(name="maintenanceWindowDetails") def maintenance_window_details(self) -> Optional[pulumi.Input['AutonomousExadataInfrastructureMaintenanceWindowDetailsArgs']]: """ (Updatable) The scheduling details for the quarterly maintenance window. 
Patching and system updates take place during the maintenance window. """ return pulumi.get(self, "maintenance_window_details") @maintenance_window_details.setter def maintenance_window_details(self, value: Optional[pulumi.Input['AutonomousExadataInfrastructureMaintenanceWindowDetailsArgs']]): pulumi.set(self, "maintenance_window_details", value) @property @pulumi.getter(name="nsgIds") def nsg_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]: """ (Updatable) A list of the [OCIDs](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the network security groups (NSGs) that this resource belongs to. Setting this to an empty array after the list is created removes the resource from all NSGs. For more information about NSGs, see [Security Rules](https://docs.cloud.oracle.com/iaas/Content/Network/Concepts/securityrules.htm). **NsgIds restrictions:** * Autonomous Databases with private access require at least 1 Network Security Group (NSG). The nsgIds array cannot be empty. 
""" return pulumi.get(self, "nsg_ids") @nsg_ids.setter def nsg_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]): pulumi.set(self, "nsg_ids", value) @pulumi.input_type class _AutonomousExadataInfrastructureState: def __init__(__self__, *, availability_domain: Optional[pulumi.Input[str]] = None, compartment_id: Optional[pulumi.Input[str]] = None, create_async: Optional[pulumi.Input[bool]] = None, defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None, display_name: Optional[pulumi.Input[str]] = None, domain: Optional[pulumi.Input[str]] = None, freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None, hostname: Optional[pulumi.Input[str]] = None, last_maintenance_run_id: Optional[pulumi.Input[str]] = None, license_model: Optional[pulumi.Input[str]] = None, lifecycle_details: Optional[pulumi.Input[str]] = None, maintenance_window: Optional[pulumi.Input['AutonomousExadataInfrastructureMaintenanceWindowArgs']] = None, maintenance_window_details: Optional[pulumi.Input['AutonomousExadataInfrastructureMaintenanceWindowDetailsArgs']] = None, next_maintenance_run_id: Optional[pulumi.Input[str]] = None, nsg_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, scan_dns_name: Optional[pulumi.Input[str]] = None, shape: Optional[pulumi.Input[str]] = None, state: Optional[pulumi.Input[str]] = None, subnet_id: Optional[pulumi.Input[str]] = None, time_created: Optional[pulumi.Input[str]] = None, zone_id: Optional[pulumi.Input[str]] = None): """ Input properties used for looking up and filtering AutonomousExadataInfrastructure resources. :param pulumi.Input[str] availability_domain: The availability domain where the Autonomous Exadata Infrastructure is located. :param pulumi.Input[str] compartment_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment the Autonomous Exadata Infrastructure belongs in. 
:param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). :param pulumi.Input[str] display_name: (Updatable) The user-friendly name for the Autonomous Exadata Infrastructure. It does not have to be unique. :param pulumi.Input[str] domain: A domain name used for the Autonomous Exadata Infrastructure. If the Oracle-provided Internet and VCN Resolver is enabled for the specified subnet, the domain name for the subnet is used (don't provide one). Otherwise, provide a valid DNS domain name. Hyphens (-) are not permitted. :param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}` :param pulumi.Input[str] hostname: The host name for the Autonomous Exadata Infrastructure node. :param pulumi.Input[str] last_maintenance_run_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the last maintenance run. :param pulumi.Input[str] license_model: The Oracle license model that applies to all the databases in the Autonomous Exadata Infrastructure. The default is BRING_YOUR_OWN_LICENSE. :param pulumi.Input[str] lifecycle_details: Additional information about the current lifecycle state of the Autonomous Exadata Infrastructure. :param pulumi.Input['AutonomousExadataInfrastructureMaintenanceWindowArgs'] maintenance_window: The scheduling details for the quarterly maintenance window. Patching and system updates take place during the maintenance window. 
:param pulumi.Input['AutonomousExadataInfrastructureMaintenanceWindowDetailsArgs'] maintenance_window_details: (Updatable) The scheduling details for the quarterly maintenance window. Patching and system updates take place during the maintenance window. :param pulumi.Input[str] next_maintenance_run_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the next maintenance run. :param pulumi.Input[Sequence[pulumi.Input[str]]] nsg_ids: (Updatable) A list of the [OCIDs](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the network security groups (NSGs) that this resource belongs to. Setting this to an empty array after the list is created removes the resource from all NSGs. For more information about NSGs, see [Security Rules](https://docs.cloud.oracle.com/iaas/Content/Network/Concepts/securityrules.htm). **NsgIds restrictions:** * Autonomous Databases with private access require at least 1 Network Security Group (NSG). The nsgIds array cannot be empty. :param pulumi.Input[str] scan_dns_name: The FQDN of the DNS record for the SCAN IP addresses that are associated with the Autonomous Exadata Infrastructure. :param pulumi.Input[str] shape: The shape of the Autonomous Exadata Infrastructure. The shape determines resources allocated to the Autonomous Exadata Infrastructure (CPU cores, memory and storage). To get a list of shapes, use the ListDbSystemShapes operation. :param pulumi.Input[str] state: The current lifecycle state of the Autonomous Exadata Infrastructure. :param pulumi.Input[str] subnet_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the subnet the Autonomous Exadata Infrastructure is associated with. :param pulumi.Input[str] time_created: The date and time the Autonomous Exadata Infrastructure was created. :param pulumi.Input[str] zone_id: The OCID of the zone the Autonomous Exadata Infrastructure is associated with. 
""" if availability_domain is not None: pulumi.set(__self__, "availability_domain", availability_domain) if compartment_id is not None: pulumi.set(__self__, "compartment_id", compartment_id) if create_async is not None: pulumi.set(__self__, "create_async", create_async) if defined_tags is not None: pulumi.set(__self__, "defined_tags", defined_tags) if display_name is not None: pulumi.set(__self__, "display_name", display_name) if domain is not None: pulumi.set(__self__, "domain", domain) if freeform_tags is not None: pulumi.set(__self__, "freeform_tags", freeform_tags) if hostname is not None: pulumi.set(__self__, "hostname", hostname) if last_maintenance_run_id is not None: pulumi.set(__self__, "last_maintenance_run_id", last_maintenance_run_id) if license_model is not None: pulumi.set(__self__, "license_model", license_model) if lifecycle_details is not None: pulumi.set(__self__, "lifecycle_details", lifecycle_details) if maintenance_window is not None: pulumi.set(__self__, "maintenance_window", maintenance_window) if maintenance_window_details is not None: pulumi.set(__self__, "maintenance_window_details", maintenance_window_details) if next_maintenance_run_id is not None: pulumi.set(__self__, "next_maintenance_run_id", next_maintenance_run_id) if nsg_ids is not None: pulumi.set(__self__, "nsg_ids", nsg_ids) if scan_dns_name is not None: pulumi.set(__self__, "scan_dns_name", scan_dns_name) if shape is not None: pulumi.set(__self__, "shape", shape) if state is not None: pulumi.set(__self__, "state", state) if subnet_id is not None: pulumi.set(__self__, "subnet_id", subnet_id) if time_created is not None: pulumi.set(__self__, "time_created", time_created) if zone_id is not None: pulumi.set(__self__, "zone_id", zone_id) @property @pulumi.getter(name="availabilityDomain") def availability_domain(self) -> Optional[pulumi.Input[str]]: """ The availability domain where the Autonomous Exadata Infrastructure is located. 
""" return pulumi.get(self, "availability_domain") @availability_domain.setter def availability_domain(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "availability_domain", value) @property @pulumi.getter(name="compartmentId") def compartment_id(self) -> Optional[pulumi.Input[str]]: """ (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment the Autonomous Exadata Infrastructure belongs in. """ return pulumi.get(self, "compartment_id") @compartment_id.setter def compartment_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "compartment_id", value) @property @pulumi.getter(name="createAsync") def create_async(self) -> Optional[pulumi.Input[bool]]: return pulumi.get(self, "create_async") @create_async.setter def create_async(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "create_async", value) @property @pulumi.getter(name="definedTags") def defined_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]: """ (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). """ return pulumi.get(self, "defined_tags") @defined_tags.setter def defined_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]): pulumi.set(self, "defined_tags", value) @property @pulumi.getter(name="displayName") def display_name(self) -> Optional[pulumi.Input[str]]: """ (Updatable) The user-friendly name for the Autonomous Exadata Infrastructure. It does not have to be unique. """ return pulumi.get(self, "display_name") @display_name.setter def display_name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "display_name", value) @property @pulumi.getter def domain(self) -> Optional[pulumi.Input[str]]: """ A domain name used for the Autonomous Exadata Infrastructure. 
If the Oracle-provided Internet and VCN Resolver is enabled for the specified subnet, the domain name for the subnet is used (don't provide one). Otherwise, provide a valid DNS domain name. Hyphens (-) are not permitted. """ return pulumi.get(self, "domain") @domain.setter def domain(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "domain", value) @property @pulumi.getter(name="freeformTags") def freeform_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]: """ (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}` """ return pulumi.get(self, "freeform_tags") @freeform_tags.setter def freeform_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]): pulumi.set(self, "freeform_tags", value) @property @pulumi.getter def hostname(self) -> Optional[pulumi.Input[str]]: """ The host name for the Autonomous Exadata Infrastructure node. """ return pulumi.get(self, "hostname") @hostname.setter def hostname(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "hostname", value) @property @pulumi.getter(name="lastMaintenanceRunId") def last_maintenance_run_id(self) -> Optional[pulumi.Input[str]]: """ The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the last maintenance run. """ return pulumi.get(self, "last_maintenance_run_id") @last_maintenance_run_id.setter def last_maintenance_run_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "last_maintenance_run_id", value) @property @pulumi.getter(name="licenseModel") def license_model(self) -> Optional[pulumi.Input[str]]: """ The Oracle license model that applies to all the databases in the Autonomous Exadata Infrastructure. The default is BRING_YOUR_OWN_LICENSE. 
""" return pulumi.get(self, "license_model") @license_model.setter def license_model(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "license_model", value) @property @pulumi.getter(name="lifecycleDetails") def lifecycle_details(self) -> Optional[pulumi.Input[str]]: """ Additional information about the current lifecycle state of the Autonomous Exadata Infrastructure. """ return pulumi.get(self, "lifecycle_details") @lifecycle_details.setter def lifecycle_details(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "lifecycle_details", value) @property @pulumi.getter(name="maintenanceWindow") def maintenance_window(self) -> Optional[pulumi.Input['AutonomousExadataInfrastructureMaintenanceWindowArgs']]: """ The scheduling details for the quarterly maintenance window. Patching and system updates take place during the maintenance window. """ return pulumi.get(self, "maintenance_window") @maintenance_window.setter def maintenance_window(self, value: Optional[pulumi.Input['AutonomousExadataInfrastructureMaintenanceWindowArgs']]): pulumi.set(self, "maintenance_window", value) @property @pulumi.getter(name="maintenanceWindowDetails") def maintenance_window_details(self) -> Optional[pulumi.Input['AutonomousExadataInfrastructureMaintenanceWindowDetailsArgs']]: """ (Updatable) The scheduling details for the quarterly maintenance window. Patching and system updates take place during the maintenance window. """ return pulumi.get(self, "maintenance_window_details") @maintenance_window_details.setter def maintenance_window_details(self, value: Optional[pulumi.Input['AutonomousExadataInfrastructureMaintenanceWindowDetailsArgs']]): pulumi.set(self, "maintenance_window_details", value) @property @pulumi.getter(name="nextMaintenanceRunId") def next_maintenance_run_id(self) -> Optional[pulumi.Input[str]]: """ The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the next maintenance run. 
""" return pulumi.get(self, "next_maintenance_run_id") @next_maintenance_run_id.setter def next_maintenance_run_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "next_maintenance_run_id", value) @property @pulumi.getter(name="nsgIds") def nsg_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]: """ (Updatable) A list of the [OCIDs](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the network security groups (NSGs) that this resource belongs to. Setting this to an empty array after the list is created removes the resource from all NSGs. For more information about NSGs, see [Security Rules](https://docs.cloud.oracle.com/iaas/Content/Network/Concepts/securityrules.htm). **NsgIds restrictions:** * Autonomous Databases with private access require at least 1 Network Security Group (NSG). The nsgIds array cannot be empty. """ return pulumi.get(self, "nsg_ids") @nsg_ids.setter def nsg_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]): pulumi.set(self, "nsg_ids", value) @property @pulumi.getter(name="scanDnsName") def scan_dns_name(self) -> Optional[pulumi.Input[str]]: """ The FQDN of the DNS record for the SCAN IP addresses that are associated with the Autonomous Exadata Infrastructure. """ return pulumi.get(self, "scan_dns_name") @scan_dns_name.setter def scan_dns_name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "scan_dns_name", value) @property @pulumi.getter def shape(self) -> Optional[pulumi.Input[str]]: """ The shape of the Autonomous Exadata Infrastructure. The shape determines resources allocated to the Autonomous Exadata Infrastructure (CPU cores, memory and storage). To get a list of shapes, use the ListDbSystemShapes operation. 
""" return pulumi.get(self, "shape") @shape.setter def shape(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "shape", value) @property @pulumi.getter def state(self) -> Optional[pulumi.Input[str]]: """ The current lifecycle state of the Autonomous Exadata Infrastructure. """ return pulumi.get(self, "state") @state.setter def state(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "state", value) @property @pulumi.getter(name="subnetId") def subnet_id(self) -> Optional[pulumi.Input[str]]: """ The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the subnet the Autonomous Exadata Infrastructure is associated with. """ return pulumi.get(self, "subnet_id") @subnet_id.setter def subnet_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "subnet_id", value) @property @pulumi.getter(name="timeCreated") def time_created(self) -> Optional[pulumi.Input[str]]: """ The date and time the Autonomous Exadata Infrastructure was created. """ return pulumi.get(self, "time_created") @time_created.setter def time_created(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "time_created", value) @property @pulumi.getter(name="zoneId") def zone_id(self) -> Optional[pulumi.Input[str]]: """ The OCID of the zone the Autonomous Exadata Infrastructure is associated with. 
""" return pulumi.get(self, "zone_id") @zone_id.setter def zone_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "zone_id", value) class AutonomousExadataInfrastructure(pulumi.CustomResource): @overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, availability_domain: Optional[pulumi.Input[str]] = None, compartment_id: Optional[pulumi.Input[str]] = None, create_async: Optional[pulumi.Input[bool]] = None, defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None, display_name: Optional[pulumi.Input[str]] = None, domain: Optional[pulumi.Input[str]] = None, freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None, license_model: Optional[pulumi.Input[str]] = None, maintenance_window_details: Optional[pulumi.Input[pulumi.InputType['AutonomousExadataInfrastructureMaintenanceWindowDetailsArgs']]] = None, nsg_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, shape: Optional[pulumi.Input[str]] = None, subnet_id: Optional[pulumi.Input[str]] = None, __props__=None): """ This resource provides the Autonomous Exadata Infrastructure resource in Oracle Cloud Infrastructure Database service. Creates a new Autonomous Exadata Infrastructure in the specified compartment and availability domain. 
## Example Usage ```python import pulumi import pulumi_oci as oci test_autonomous_exadata_infrastructure = oci.database.AutonomousExadataInfrastructure("testAutonomousExadataInfrastructure", availability_domain=var["autonomous_exadata_infrastructure_availability_domain"], compartment_id=var["compartment_id"], shape=var["autonomous_exadata_infrastructure_shape"], subnet_id=oci_core_subnet["test_subnet"]["id"], defined_tags={ "Operations.CostCenter": "42", }, display_name=var["autonomous_exadata_infrastructure_display_name"], domain=var["autonomous_exadata_infrastructure_domain"], freeform_tags={ "Department": "Finance", }, license_model=var["autonomous_exadata_infrastructure_license_model"], maintenance_window_details=oci.database.AutonomousExadataInfrastructureMaintenanceWindowDetailsArgs( preference=var["autonomous_exadata_infrastructure_maintenance_window_details_preference"], days_of_weeks=[oci.database.AutonomousExadataInfrastructureMaintenanceWindowDetailsDaysOfWeekArgs( name=var["autonomous_exadata_infrastructure_maintenance_window_details_days_of_week_name"], )], hours_of_days=var["autonomous_exadata_infrastructure_maintenance_window_details_hours_of_day"], lead_time_in_weeks=var["autonomous_exadata_infrastructure_maintenance_window_details_lead_time_in_weeks"], months=[oci.database.AutonomousExadataInfrastructureMaintenanceWindowDetailsMonthArgs( name=var["autonomous_exadata_infrastructure_maintenance_window_details_months_name"], )], weeks_of_months=var["autonomous_exadata_infrastructure_maintenance_window_details_weeks_of_month"], ), nsg_ids=var["autonomous_exadata_infrastructure_nsg_ids"]) ``` ## Import AutonomousExadataInfrastructures can be imported using the `id`, e.g. ```sh $ pulumi import oci:database/autonomousExadataInfrastructure:AutonomousExadataInfrastructure test_autonomous_exadata_infrastructure "id" ``` :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. 
:param pulumi.Input[str] availability_domain: The availability domain where the Autonomous Exadata Infrastructure is located. :param pulumi.Input[str] compartment_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment the Autonomous Exadata Infrastructure belongs in. :param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). :param pulumi.Input[str] display_name: (Updatable) The user-friendly name for the Autonomous Exadata Infrastructure. It does not have to be unique. :param pulumi.Input[str] domain: A domain name used for the Autonomous Exadata Infrastructure. If the Oracle-provided Internet and VCN Resolver is enabled for the specified subnet, the domain name for the subnet is used (don't provide one). Otherwise, provide a valid DNS domain name. Hyphens (-) are not permitted. :param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}` :param pulumi.Input[str] license_model: The Oracle license model that applies to all the databases in the Autonomous Exadata Infrastructure. The default is BRING_YOUR_OWN_LICENSE. :param pulumi.Input[pulumi.InputType['AutonomousExadataInfrastructureMaintenanceWindowDetailsArgs']] maintenance_window_details: (Updatable) The scheduling details for the quarterly maintenance window. Patching and system updates take place during the maintenance window. 
:param pulumi.Input[Sequence[pulumi.Input[str]]] nsg_ids: (Updatable) A list of the [OCIDs](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the network security groups (NSGs) that this resource belongs to. Setting this to an empty array after the list is created removes the resource from all NSGs. For more information about NSGs, see [Security Rules](https://docs.cloud.oracle.com/iaas/Content/Network/Concepts/securityrules.htm). **NsgIds restrictions:** * Autonomous Databases with private access require at least 1 Network Security Group (NSG). The nsgIds array cannot be empty. :param pulumi.Input[str] shape: The shape of the Autonomous Exadata Infrastructure. The shape determines resources allocated to the Autonomous Exadata Infrastructure (CPU cores, memory and storage). To get a list of shapes, use the ListDbSystemShapes operation. :param pulumi.Input[str] subnet_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the subnet the Autonomous Exadata Infrastructure is associated with. """ ... @overload def __init__(__self__, resource_name: str, args: AutonomousExadataInfrastructureArgs, opts: Optional[pulumi.ResourceOptions] = None): """ This resource provides the Autonomous Exadata Infrastructure resource in Oracle Cloud Infrastructure Database service. Creates a new Autonomous Exadata Infrastructure in the specified compartment and availability domain. 
## Example Usage ```python import pulumi import pulumi_oci as oci test_autonomous_exadata_infrastructure = oci.database.AutonomousExadataInfrastructure("testAutonomousExadataInfrastructure", availability_domain=var["autonomous_exadata_infrastructure_availability_domain"], compartment_id=var["compartment_id"], shape=var["autonomous_exadata_infrastructure_shape"], subnet_id=oci_core_subnet["test_subnet"]["id"], defined_tags={ "Operations.CostCenter": "42", }, display_name=var["autonomous_exadata_infrastructure_display_name"], domain=var["autonomous_exadata_infrastructure_domain"], freeform_tags={ "Department": "Finance", }, license_model=var["autonomous_exadata_infrastructure_license_model"], maintenance_window_details=oci.database.AutonomousExadataInfrastructureMaintenanceWindowDetailsArgs( preference=var["autonomous_exadata_infrastructure_maintenance_window_details_preference"], days_of_weeks=[oci.database.AutonomousExadataInfrastructureMaintenanceWindowDetailsDaysOfWeekArgs( name=var["autonomous_exadata_infrastructure_maintenance_window_details_days_of_week_name"], )], hours_of_days=var["autonomous_exadata_infrastructure_maintenance_window_details_hours_of_day"], lead_time_in_weeks=var["autonomous_exadata_infrastructure_maintenance_window_details_lead_time_in_weeks"], months=[oci.database.AutonomousExadataInfrastructureMaintenanceWindowDetailsMonthArgs( name=var["autonomous_exadata_infrastructure_maintenance_window_details_months_name"], )], weeks_of_months=var["autonomous_exadata_infrastructure_maintenance_window_details_weeks_of_month"], ), nsg_ids=var["autonomous_exadata_infrastructure_nsg_ids"]) ``` ## Import AutonomousExadataInfrastructures can be imported using the `id`, e.g. ```sh $ pulumi import oci:database/autonomousExadataInfrastructure:AutonomousExadataInfrastructure test_autonomous_exadata_infrastructure "id" ``` :param str resource_name: The name of the resource. 
:param AutonomousExadataInfrastructureArgs args: The arguments to use to populate this resource's properties. :param pulumi.ResourceOptions opts: Options for the resource. """ ... def __init__(__self__, resource_name: str, *args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(AutonomousExadataInfrastructureArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, availability_domain: Optional[pulumi.Input[str]] = None, compartment_id: Optional[pulumi.Input[str]] = None, create_async: Optional[pulumi.Input[bool]] = None, defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None, display_name: Optional[pulumi.Input[str]] = None, domain: Optional[pulumi.Input[str]] = None, freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None, license_model: Optional[pulumi.Input[str]] = None, maintenance_window_details: Optional[pulumi.Input[pulumi.InputType['AutonomousExadataInfrastructureMaintenanceWindowDetailsArgs']]] = None, nsg_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, shape: Optional[pulumi.Input[str]] = None, subnet_id: Optional[pulumi.Input[str]] = None, __props__=None): if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version() if opts.id is None: if __props__ is not None: raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource') __props__ = AutonomousExadataInfrastructureArgs.__new__(AutonomousExadataInfrastructureArgs) if availability_domain is None and not opts.urn: raise TypeError("Missing required property 'availability_domain'") 
__props__.__dict__["availability_domain"] = availability_domain if compartment_id is None and not opts.urn: raise TypeError("Missing required property 'compartment_id'") __props__.__dict__["compartment_id"] = compartment_id __props__.__dict__["create_async"] = create_async __props__.__dict__["defined_tags"] = defined_tags __props__.__dict__["display_name"] = display_name __props__.__dict__["domain"] = domain __props__.__dict__["freeform_tags"] = freeform_tags __props__.__dict__["license_model"] = license_model __props__.__dict__["maintenance_window_details"] = maintenance_window_details __props__.__dict__["nsg_ids"] = nsg_ids if shape is None and not opts.urn: raise TypeError("Missing required property 'shape'") __props__.__dict__["shape"] = shape if subnet_id is None and not opts.urn: raise TypeError("Missing required property 'subnet_id'") __props__.__dict__["subnet_id"] = subnet_id __props__.__dict__["hostname"] = None __props__.__dict__["last_maintenance_run_id"] = None __props__.__dict__["lifecycle_details"] = None __props__.__dict__["maintenance_window"] = None __props__.__dict__["next_maintenance_run_id"] = None __props__.__dict__["scan_dns_name"] = None __props__.__dict__["state"] = None __props__.__dict__["time_created"] = None __props__.__dict__["zone_id"] = None super(AutonomousExadataInfrastructure, __self__).__init__( 'oci:database/autonomousExadataInfrastructure:AutonomousExadataInfrastructure', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None, availability_domain: Optional[pulumi.Input[str]] = None, compartment_id: Optional[pulumi.Input[str]] = None, create_async: Optional[pulumi.Input[bool]] = None, defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None, display_name: Optional[pulumi.Input[str]] = None, domain: Optional[pulumi.Input[str]] = None, freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None, hostname: Optional[pulumi.Input[str]] = 
None, last_maintenance_run_id: Optional[pulumi.Input[str]] = None, license_model: Optional[pulumi.Input[str]] = None, lifecycle_details: Optional[pulumi.Input[str]] = None, maintenance_window: Optional[pulumi.Input[pulumi.InputType['AutonomousExadataInfrastructureMaintenanceWindowArgs']]] = None, maintenance_window_details: Optional[pulumi.Input[pulumi.InputType['AutonomousExadataInfrastructureMaintenanceWindowDetailsArgs']]] = None, next_maintenance_run_id: Optional[pulumi.Input[str]] = None, nsg_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, scan_dns_name: Optional[pulumi.Input[str]] = None, shape: Optional[pulumi.Input[str]] = None, state: Optional[pulumi.Input[str]] = None, subnet_id: Optional[pulumi.Input[str]] = None, time_created: Optional[pulumi.Input[str]] = None, zone_id: Optional[pulumi.Input[str]] = None) -> 'AutonomousExadataInfrastructure': """ Get an existing AutonomousExadataInfrastructure resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] availability_domain: The availability domain where the Autonomous Exadata Infrastructure is located. :param pulumi.Input[str] compartment_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment the Autonomous Exadata Infrastructure belongs in. :param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). :param pulumi.Input[str] display_name: (Updatable) The user-friendly name for the Autonomous Exadata Infrastructure. It does not have to be unique. 
:param pulumi.Input[str] domain: A domain name used for the Autonomous Exadata Infrastructure. If the Oracle-provided Internet and VCN Resolver is enabled for the specified subnet, the domain name for the subnet is used (don't provide one). Otherwise, provide a valid DNS domain name. Hyphens (-) are not permitted. :param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}` :param pulumi.Input[str] hostname: The host name for the Autonomous Exadata Infrastructure node. :param pulumi.Input[str] last_maintenance_run_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the last maintenance run. :param pulumi.Input[str] license_model: The Oracle license model that applies to all the databases in the Autonomous Exadata Infrastructure. The default is BRING_YOUR_OWN_LICENSE. :param pulumi.Input[str] lifecycle_details: Additional information about the current lifecycle state of the Autonomous Exadata Infrastructure. :param pulumi.Input[pulumi.InputType['AutonomousExadataInfrastructureMaintenanceWindowArgs']] maintenance_window: The scheduling details for the quarterly maintenance window. Patching and system updates take place during the maintenance window. :param pulumi.Input[pulumi.InputType['AutonomousExadataInfrastructureMaintenanceWindowDetailsArgs']] maintenance_window_details: (Updatable) The scheduling details for the quarterly maintenance window. Patching and system updates take place during the maintenance window. :param pulumi.Input[str] next_maintenance_run_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the next maintenance run. 
:param pulumi.Input[Sequence[pulumi.Input[str]]] nsg_ids: (Updatable) A list of the [OCIDs](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the network security groups (NSGs) that this resource belongs to. Setting this to an empty array after the list is created removes the resource from all NSGs. For more information about NSGs, see [Security Rules](https://docs.cloud.oracle.com/iaas/Content/Network/Concepts/securityrules.htm). **NsgIds restrictions:** * Autonomous Databases with private access require at least 1 Network Security Group (NSG). The nsgIds array cannot be empty. :param pulumi.Input[str] scan_dns_name: The FQDN of the DNS record for the SCAN IP addresses that are associated with the Autonomous Exadata Infrastructure. :param pulumi.Input[str] shape: The shape of the Autonomous Exadata Infrastructure. The shape determines resources allocated to the Autonomous Exadata Infrastructure (CPU cores, memory and storage). To get a list of shapes, use the ListDbSystemShapes operation. :param pulumi.Input[str] state: The current lifecycle state of the Autonomous Exadata Infrastructure. :param pulumi.Input[str] subnet_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the subnet the Autonomous Exadata Infrastructure is associated with. :param pulumi.Input[str] time_created: The date and time the Autonomous Exadata Infrastructure was created. :param pulumi.Input[str] zone_id: The OCID of the zone the Autonomous Exadata Infrastructure is associated with. 
""" opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = _AutonomousExadataInfrastructureState.__new__(_AutonomousExadataInfrastructureState) __props__.__dict__["availability_domain"] = availability_domain __props__.__dict__["compartment_id"] = compartment_id __props__.__dict__["create_async"] = create_async __props__.__dict__["defined_tags"] = defined_tags __props__.__dict__["display_name"] = display_name __props__.__dict__["domain"] = domain __props__.__dict__["freeform_tags"] = freeform_tags __props__.__dict__["hostname"] = hostname __props__.__dict__["last_maintenance_run_id"] = last_maintenance_run_id __props__.__dict__["license_model"] = license_model __props__.__dict__["lifecycle_details"] = lifecycle_details __props__.__dict__["maintenance_window"] = maintenance_window __props__.__dict__["maintenance_window_details"] = maintenance_window_details __props__.__dict__["next_maintenance_run_id"] = next_maintenance_run_id __props__.__dict__["nsg_ids"] = nsg_ids __props__.__dict__["scan_dns_name"] = scan_dns_name __props__.__dict__["shape"] = shape __props__.__dict__["state"] = state __props__.__dict__["subnet_id"] = subnet_id __props__.__dict__["time_created"] = time_created __props__.__dict__["zone_id"] = zone_id return AutonomousExadataInfrastructure(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name="availabilityDomain") def availability_domain(self) -> pulumi.Output[str]: """ The availability domain where the Autonomous Exadata Infrastructure is located. """ return pulumi.get(self, "availability_domain") @property @pulumi.getter(name="compartmentId") def compartment_id(self) -> pulumi.Output[str]: """ (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment the Autonomous Exadata Infrastructure belongs in. 
""" return pulumi.get(self, "compartment_id") @property @pulumi.getter(name="createAsync") def create_async(self) -> pulumi.Output[Optional[bool]]: return pulumi.get(self, "create_async") @property @pulumi.getter(name="definedTags") def defined_tags(self) -> pulumi.Output[Mapping[str, Any]]: """ (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). """ return pulumi.get(self, "defined_tags") @property @pulumi.getter(name="displayName") def display_name(self) -> pulumi.Output[str]: """ (Updatable) The user-friendly name for the Autonomous Exadata Infrastructure. It does not have to be unique. """ return pulumi.get(self, "display_name") @property @pulumi.getter def domain(self) -> pulumi.Output[str]: """ A domain name used for the Autonomous Exadata Infrastructure. If the Oracle-provided Internet and VCN Resolver is enabled for the specified subnet, the domain name for the subnet is used (don't provide one). Otherwise, provide a valid DNS domain name. Hyphens (-) are not permitted. """ return pulumi.get(self, "domain") @property @pulumi.getter(name="freeformTags") def freeform_tags(self) -> pulumi.Output[Mapping[str, Any]]: """ (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}` """ return pulumi.get(self, "freeform_tags") @property @pulumi.getter def hostname(self) -> pulumi.Output[str]: """ The host name for the Autonomous Exadata Infrastructure node. 
""" return pulumi.get(self, "hostname") @property @pulumi.getter(name="lastMaintenanceRunId") def last_maintenance_run_id(self) -> pulumi.Output[str]: """ The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the last maintenance run. """ return pulumi.get(self, "last_maintenance_run_id") @property @pulumi.getter(name="licenseModel") def license_model(self) -> pulumi.Output[str]: """ The Oracle license model that applies to all the databases in the Autonomous Exadata Infrastructure. The default is BRING_YOUR_OWN_LICENSE. """ return pulumi.get(self, "license_model") @property @pulumi.getter(name="lifecycleDetails") def lifecycle_details(self) -> pulumi.Output[str]: """ Additional information about the current lifecycle state of the Autonomous Exadata Infrastructure. """ return pulumi.get(self, "lifecycle_details") @property @pulumi.getter(name="maintenanceWindow") def maintenance_window(self) -> pulumi.Output['outputs.AutonomousExadataInfrastructureMaintenanceWindow']: """ The scheduling details for the quarterly maintenance window. Patching and system updates take place during the maintenance window. """ return pulumi.get(self, "maintenance_window") @property @pulumi.getter(name="maintenanceWindowDetails") def maintenance_window_details(self) -> pulumi.Output[Optional['outputs.AutonomousExadataInfrastructureMaintenanceWindowDetails']]: """ (Updatable) The scheduling details for the quarterly maintenance window. Patching and system updates take place during the maintenance window. """ return pulumi.get(self, "maintenance_window_details") @property @pulumi.getter(name="nextMaintenanceRunId") def next_maintenance_run_id(self) -> pulumi.Output[str]: """ The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the next maintenance run. 
""" return pulumi.get(self, "next_maintenance_run_id") @property @pulumi.getter(name="nsgIds") def nsg_ids(self) -> pulumi.Output[Optional[Sequence[str]]]: """ (Updatable) A list of the [OCIDs](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the network security groups (NSGs) that this resource belongs to. Setting this to an empty array after the list is created removes the resource from all NSGs. For more information about NSGs, see [Security Rules](https://docs.cloud.oracle.com/iaas/Content/Network/Concepts/securityrules.htm). **NsgIds restrictions:** * Autonomous Databases with private access require at least 1 Network Security Group (NSG). The nsgIds array cannot be empty. """ return pulumi.get(self, "nsg_ids") @property @pulumi.getter(name="scanDnsName") def scan_dns_name(self) -> pulumi.Output[str]: """ The FQDN of the DNS record for the SCAN IP addresses that are associated with the Autonomous Exadata Infrastructure. """ return pulumi.get(self, "scan_dns_name") @property @pulumi.getter def shape(self) -> pulumi.Output[str]: """ The shape of the Autonomous Exadata Infrastructure. The shape determines resources allocated to the Autonomous Exadata Infrastructure (CPU cores, memory and storage). To get a list of shapes, use the ListDbSystemShapes operation. """ return pulumi.get(self, "shape") @property @pulumi.getter def state(self) -> pulumi.Output[str]: """ The current lifecycle state of the Autonomous Exadata Infrastructure. """ return pulumi.get(self, "state") @property @pulumi.getter(name="subnetId") def subnet_id(self) -> pulumi.Output[str]: """ The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the subnet the Autonomous Exadata Infrastructure is associated with. """ return pulumi.get(self, "subnet_id") @property @pulumi.getter(name="timeCreated") def time_created(self) -> pulumi.Output[str]: """ The date and time the Autonomous Exadata Infrastructure was created. 
""" return pulumi.get(self, "time_created") @property @pulumi.getter(name="zoneId") def zone_id(self) -> pulumi.Output[str]: """ The OCID of the zone the Autonomous Exadata Infrastructure is associated with. """ return pulumi.get(self, "zone_id")
59.995035
497
0.701945
7,126
60,415
5.757929
0.045046
0.059784
0.052546
0.044503
0.926665
0.910409
0.896493
0.884039
0.868368
0.838975
0
0.000249
0.201291
60,415
1,006
498
60.054672
0.850064
0.475014
0
0.696809
1
0
0.137533
0.055972
0
0
0
0
0
1
0.166667
false
0.001773
0.012411
0.005319
0.281915
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
1abbb898ffb4e7facc271bb28e9c66210e52dd9c
4,711
py
Python
tests/test_fluids.py
mitchute/Open-GHX
7a88872c36200c620cfd07994119cfb243a998c9
[ "MIT" ]
4
2017-10-09T21:08:44.000Z
2020-11-18T11:09:56.000Z
tests/test_fluids.py
mitchute/Open-GHX
7a88872c36200c620cfd07994119cfb243a998c9
[ "MIT" ]
1
2017-08-18T01:44:13.000Z
2017-08-18T02:23:21.000Z
tests/test_fluids.py
mitchute/Open-GHX
7a88872c36200c620cfd07994119cfb243a998c9
[ "MIT" ]
3
2016-09-08T14:57:21.000Z
2021-06-29T08:42:08.000Z
import unittest from ghx.fluids import FluidsClass class TestFluidsClass(unittest.TestCase): def test_dens(self): """ Tests fluid density calculation routine Reference values come from Cengel & Ghajar 2015 Cengel, Y.A., & Ghajar, A.J. 2015. Heat and Mass Transfer, Fundamentals and Applications. McGraw-Hill. New York, New York. """ dict_fluid = {'Type': 'Water', 'Concentration': 100, 'Flow Rate': 0.000303} tolerance = 1.0 curr_tst = FluidsClass(dict_fluid, 20, False) self.assertAlmostEqual(curr_tst.dens(), 998.0, delta=tolerance) curr_tst.temperature = 40 self.assertAlmostEqual(curr_tst.dens(), 992.1, delta=tolerance) curr_tst.temperature = 60 self.assertAlmostEqual(curr_tst.dens(), 983.3, delta=tolerance) curr_tst.temperature = 80 self.assertAlmostEqual(curr_tst.dens(), 971.8, delta=tolerance) def test_cp(self): """ Tests fluid specific heat calculation routine Reference values come from Cengel & Ghajar 2015 Cengel, Y.A., & Ghajar, A.J. 2015. Heat and Mass Transfer, Fundamentals and Applications. McGraw-Hill. New York, New York. """ dict_fluid = {'Type': 'Water', 'Concentration': 100, 'Flow Rate': 0.000303} tolerance = 4.0 curr_tst = FluidsClass(dict_fluid, 20, False) self.assertAlmostEqual(curr_tst.cp(), 4182, delta=tolerance) curr_tst.temperature = 40 self.assertAlmostEqual(curr_tst.cp(), 4179, delta=tolerance) curr_tst.temperature = 60 self.assertAlmostEqual(curr_tst.cp(), 4185, delta=tolerance) curr_tst.temperature = 80 self.assertAlmostEqual(curr_tst.cp(), 4197, delta=tolerance) def test_visc(self): """ Tests fluid viscosity calculations Reference values come from Cengel & Ghajar 2015 Cengel, Y.A., & Ghajar, A.J. 2015. Heat and Mass Transfer, Fundamentals and Applications. McGraw-Hill. New York, New York. 
""" dict_fluid = {'Type': 'Water', 'Concentration': 100, 'Flow Rate': 0.000303} tolerance = 1E-4 curr_tst = FluidsClass(dict_fluid, 20, False) self.assertAlmostEqual(curr_tst.visc(), 1.002E-3, delta=tolerance) curr_tst.temperature = 40 self.assertAlmostEqual(curr_tst.visc(), 0.653E-3, delta=tolerance) curr_tst.temperature = 60 self.assertAlmostEqual(curr_tst.visc(), 0.467E-3, delta=tolerance) curr_tst.temperature = 80 self.assertAlmostEqual(curr_tst.visc(), 0.355E-3, delta=tolerance) def test_cond(self): """ Tests fluid conductivity calculations Reference values come from Cengel & Ghajar 2015 Cengel, Y.A., & Ghajar, A.J. 2015. Heat and Mass Transfer, Fundamentals and Applications. McGraw-Hill. New York, New York. """ dict_fluid = {'Type': 'Water', 'Concentration': 100, 'Flow Rate': 0.000303} tolerance = 1E-2 curr_tst = FluidsClass(dict_fluid, 20, False) self.assertAlmostEqual(curr_tst.cond(), 0.598, delta=tolerance) curr_tst.temperature = 40 self.assertAlmostEqual(curr_tst.cond(), 0.631, delta=tolerance) curr_tst.temperature = 60 self.assertAlmostEqual(curr_tst.cond(), 0.654, delta=tolerance) curr_tst.temperature = 80 self.assertAlmostEqual(curr_tst.cond(), 0.670, delta=tolerance) def test_pr(self): """ Tests fluid Prandtl number calculations Reference values come from Cengel & Ghajar 2015 Cengel, Y.A., & Ghajar, A.J. 2015. Heat and Mass Transfer, Fundamentals and Applications. McGraw-Hill. New York, New York. """ dict_fluid = {'Type': 'Water', 'Concentration': 100, 'Flow Rate': 0.000303} tolerance = 1E-1 curr_tst = FluidsClass(dict_fluid, 20, False) self.assertAlmostEqual(curr_tst.pr(), 7.01, delta=tolerance) curr_tst.temperature = 40 self.assertAlmostEqual(curr_tst.pr(), 4.32, delta=tolerance) curr_tst.temperature = 60 self.assertAlmostEqual(curr_tst.pr(), 2.99, delta=tolerance) curr_tst.temperature = 80 self.assertAlmostEqual(curr_tst.pr(), 2.22, delta=tolerance)
32.267123
98
0.591594
530
4,711
5.154717
0.177358
0.102489
0.183016
0.204978
0.853587
0.841874
0.826135
0.826135
0.826135
0.826135
0
0.06734
0.306517
4,711
145
99
32.489655
0.768901
0.225642
0
0.514706
0
0
0.047444
0
0
0
0
0
0.294118
1
0.073529
false
0
0.029412
0
0.117647
0
0
0
0
null
0
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
1ac5a8ab3836b92ed69d1b3e578ab23b456765c1
3,071
py
Python
terrascript/vcd/d.py
mjuenema/python-terrascript
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
[ "BSD-2-Clause" ]
507
2017-07-26T02:58:38.000Z
2022-01-21T12:35:13.000Z
terrascript/vcd/d.py
mjuenema/python-terrascript
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
[ "BSD-2-Clause" ]
135
2017-07-20T12:01:59.000Z
2021-10-04T22:25:40.000Z
terrascript/vcd/d.py
mjuenema/python-terrascript
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
[ "BSD-2-Clause" ]
81
2018-02-20T17:55:28.000Z
2022-01-31T07:08:40.000Z
# terrascript/vcd/d.py # Automatically generated by tools/makecode.py () import warnings warnings.warn( "using the 'legacy layout' is deprecated", DeprecationWarning, stacklevel=2 ) import terrascript class vcd_catalog(terrascript.Data): pass class vcd_catalog_item(terrascript.Data): pass class vcd_catalog_media(terrascript.Data): pass class vcd_edgegateway(terrascript.Data): pass class vcd_external_network(terrascript.Data): pass class vcd_external_network_v2(terrascript.Data): pass class vcd_global_role(terrascript.Data): pass class vcd_independent_disk(terrascript.Data): pass class vcd_lb_app_profile(terrascript.Data): pass class vcd_lb_app_rule(terrascript.Data): pass class vcd_lb_server_pool(terrascript.Data): pass class vcd_lb_service_monitor(terrascript.Data): pass class vcd_lb_virtual_server(terrascript.Data): pass class vcd_network_direct(terrascript.Data): pass class vcd_network_isolated(terrascript.Data): pass class vcd_network_isolated_v2(terrascript.Data): pass class vcd_network_routed(terrascript.Data): pass class vcd_network_routed_v2(terrascript.Data): pass class vcd_nsxt_app_port_profile(terrascript.Data): pass class vcd_nsxt_edge_cluster(terrascript.Data): pass class vcd_nsxt_edgegateway(terrascript.Data): pass class vcd_nsxt_firewall(terrascript.Data): pass class vcd_nsxt_ip_set(terrascript.Data): pass class vcd_nsxt_ipsec_vpn_tunnel(terrascript.Data): pass class vcd_nsxt_manager(terrascript.Data): pass class vcd_nsxt_nat_rule(terrascript.Data): pass class vcd_nsxt_network_dhcp(terrascript.Data): pass class vcd_nsxt_network_imported(terrascript.Data): pass class vcd_nsxt_security_group(terrascript.Data): pass class vcd_nsxt_tier0_router(terrascript.Data): pass class vcd_nsxv_dhcp_relay(terrascript.Data): pass class vcd_nsxv_dnat(terrascript.Data): pass class vcd_nsxv_firewall_rule(terrascript.Data): pass class vcd_nsxv_ip_set(terrascript.Data): pass class vcd_nsxv_snat(terrascript.Data): pass class vcd_org(terrascript.Data): pass class 
vcd_org_user(terrascript.Data): pass class vcd_org_vdc(terrascript.Data): pass class vcd_portgroup(terrascript.Data): pass class vcd_resource_list(terrascript.Data): pass class vcd_resource_schema(terrascript.Data): pass class vcd_right(terrascript.Data): pass class vcd_rights_bundle(terrascript.Data): pass class vcd_role(terrascript.Data): pass class vcd_storage_profile(terrascript.Data): pass class vcd_vapp(terrascript.Data): pass class vcd_vapp_network(terrascript.Data): pass class vcd_vapp_org_network(terrascript.Data): pass class vcd_vapp_vm(terrascript.Data): pass class vcd_vcenter(terrascript.Data): pass class vcd_vm(terrascript.Data): pass class vcd_vm_affinity_rule(terrascript.Data): pass class vcd_vm_sizing_policy(terrascript.Data): pass
13.833333
79
0.760339
412
3,071
5.371359
0.208738
0.191595
0.455038
0.56394
0.793041
0.732038
0.23859
0
0
0
0
0.001949
0.164442
3,071
221
80
13.895928
0.860483
0.022143
0
0.477477
1
0
0.013
0
0
0
0
0
0
1
0
true
0.477477
0.027027
0
0.504505
0
0
0
0
null
0
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
1
1
0
0
1
0
0
7
202b13c8fa56488f7044f79ea60dded408f17f7f
6,110
py
Python
tests/worker/test_diamond_miner_inner_pipeline.py
dioptra-io/iris
1a7dfb8210fdc0f0554b61b81cbfdba7872f9d39
[ "MIT" ]
6
2022-01-13T16:09:57.000Z
2022-03-26T08:39:47.000Z
tests/worker/test_diamond_miner_inner_pipeline.py
dioptra-io/iris
1a7dfb8210fdc0f0554b61b81cbfdba7872f9d39
[ "MIT" ]
16
2022-02-01T06:09:13.000Z
2022-03-01T06:12:30.000Z
tests/worker/test_diamond_miner_inner_pipeline.py
dioptra-io/iris
1a7dfb8210fdc0f0554b61b81cbfdba7872f9d39
[ "MIT" ]
null
null
null
from uuid import uuid4 from iris.commons.models.diamond_miner import ToolParameters from iris.commons.models.round import Round from iris.commons.test import compress_file, decompress_file from iris.worker.inner_pipeline import diamond_miner_inner_pipeline targets_content = "1.0.0.0/23,icmp,0,32,6\n2001::/63,icmp6,0,32,6" async def test_default_inner_pipeline_round_1_0(clickhouse, logger, tmp_path): measurement_uuid = str(uuid4()) agent_uuid = str(uuid4()) await clickhouse.create_tables(measurement_uuid, agent_uuid, 24, 64, drop=True) probes_filepath = tmp_path / "probes.csv.zst" targets_filepath = tmp_path / "targets.csv" targets_filepath.write_text(targets_content) n_probes = await diamond_miner_inner_pipeline( clickhouse=clickhouse, logger=logger, measurement_uuid=measurement_uuid, agent_uuid=agent_uuid, agent_min_ttl=0, sliding_window_stopping_condition=3, tool_parameters=ToolParameters(), results_filepath=None, targets_filepath=targets_filepath, probes_filepath=probes_filepath, previous_round=None, next_round=Round(number=1, limit=10, offset=0), max_open_files=128, ) probes_filepath = decompress_file(probes_filepath) probes = probes_filepath.read_text().split() assert len(probes) == n_probes == 240 async def test_default_inner_pipeline_round_1_1_no_results( clickhouse, logger, tmp_path ): measurement_uuid = str(uuid4()) agent_uuid = str(uuid4()) await clickhouse.create_tables(measurement_uuid, agent_uuid, 24, 64, drop=True) probes_filepath = tmp_path / "probes.csv.zst" targets_filepath = tmp_path / "targets.csv" targets_filepath.write_text(targets_content) n_probes = await diamond_miner_inner_pipeline( clickhouse=clickhouse, logger=logger, measurement_uuid=measurement_uuid, agent_uuid=agent_uuid, agent_min_ttl=0, sliding_window_stopping_condition=3, tool_parameters=ToolParameters(), # NOTE: here we do not insert any results, since we probed up to # TTL 10 during the previous round, and that the stopping condition # is 3 stars, we should not get any more 
probes. results_filepath=None, targets_filepath=targets_filepath, probes_filepath=probes_filepath, previous_round=Round(number=1, limit=10, offset=0), next_round=Round(number=1, limit=10, offset=1), max_open_files=128, ) assert n_probes == 0 assert not probes_filepath.exists() async def test_default_inner_pipeline_round_1_1_results(clickhouse, logger, tmp_path): measurement_uuid = str(uuid4()) agent_uuid = str(uuid4()) await clickhouse.create_tables(measurement_uuid, agent_uuid, 24, 64, drop=True) probes_filepath = tmp_path / "probes.csv.zst" results_filepath = tmp_path / "results.csv" results_filepath.write_text( """capture_timestamp,probe_protocol,probe_src_addr,probe_dst_addr,probe_src_port,probe_dst_port,probe_ttl,quoted_ttl,reply_src_addr,reply_protocol,reply_icmp_type,reply_icmp_code,reply_ttl,reply_size,reply_mpls_labels,rtt,round 1640006077,1,::ffff:172.17.0.2,::ffff:1.0.0.1,24000,0,9,1,::ffff:80.80.80.80,1,11,0,64,59,"[]",1,1 1640006077,58,2002::1,2001::1,24000,0,9,1,2003::1,58,3,0,64,59,"[]",1,1 """ ) results_filepath = compress_file(results_filepath) targets_filepath = tmp_path / "targets.csv" targets_filepath.write_text(targets_content) n_probes = await diamond_miner_inner_pipeline( clickhouse=clickhouse, logger=logger, measurement_uuid=measurement_uuid, agent_uuid=agent_uuid, agent_min_ttl=0, sliding_window_stopping_condition=3, tool_parameters=ToolParameters(), # NOTE: here we insert results, so we should get probes from TTL 10 to 20, # only for 1.0.0.0/24 since we did not insert results for 1.0.1.0/24. 
# Same for 2001::/64 results_filepath=results_filepath, targets_filepath=targets_filepath, probes_filepath=probes_filepath, previous_round=Round(number=1, limit=10, offset=0), next_round=Round(number=1, limit=10, offset=1), max_open_files=128, ) decompress_file(probes_filepath, probes_filepath.with_suffix(".csv")) probes = probes_filepath.with_suffix(".csv").read_text().split() assert len(probes) == n_probes == 120 async def test_default_inner_pipeline_round_2(clickhouse, logger, tmp_path): measurement_uuid = str(uuid4()) agent_uuid = str(uuid4()) await clickhouse.create_tables(measurement_uuid, agent_uuid, 24, 64, drop=True) probes_filepath = tmp_path / "probes.csv.zst" results_filepath = tmp_path / "results.csv" results_filepath.write_text( """capture_timestamp,probe_protocol,probe_src_addr,probe_dst_addr,probe_src_port,probe_dst_port,probe_ttl,quoted_ttl,reply_src_addr,reply_protocol,reply_icmp_type,reply_icmp_code,reply_ttl,reply_size,reply_mpls_labels,rtt,round 1640006077,1,::ffff:172.17.0.2,::ffff:1.0.0.1,24000,0,9,1,::ffff:80.80.80.80,1,11,0,64,59,"[]",1,1 1640006077,58,2002::1,2001::1,24000,0,9,1,2003::1,58,3,0,64,59,"[]",1,1 """ ) results_filepath = compress_file(results_filepath) targets_filepath = tmp_path / "targets.csv" targets_filepath.write_text(targets_content) n_probes = await diamond_miner_inner_pipeline( clickhouse=clickhouse, logger=logger, measurement_uuid=measurement_uuid, agent_uuid=agent_uuid, agent_min_ttl=2, sliding_window_stopping_condition=3, tool_parameters=ToolParameters(), results_filepath=results_filepath, targets_filepath=targets_filepath, probes_filepath=probes_filepath, previous_round=Round(number=1, limit=10, offset=1), next_round=Round(number=2, limit=0, offset=0), max_open_files=128, ) # No load-balancing, so Diamond-Miner should stop here. assert n_probes == 0 assert not probes_filepath.exists()
40.733333
235
0.721113
864
6,110
4.800926
0.165509
0.067502
0.037608
0.046287
0.851977
0.824012
0.815092
0.806172
0.771456
0.731919
0
0.063344
0.175777
6,110
149
236
41.006711
0.760326
0.063666
0
0.745614
0
0.008772
0.035853
0.009371
0
0
0
0
0.052632
1
0
false
0
0.04386
0
0.04386
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
204c0931e7b88dc06094efd4f589b96810de469e
165
py
Python
zgw_consumers/api_models/compat.py
maykinmedia/zgw-consumers
9b0759d9b7c3590b245004afd4c5e5474785bf91
[ "MIT" ]
2
2021-04-25T11:29:33.000Z
2022-03-08T14:06:58.000Z
zgw_consumers/api_models/compat.py
maykinmedia/zgw-consumers
9b0759d9b7c3590b245004afd4c5e5474785bf91
[ "MIT" ]
27
2020-04-01T07:33:02.000Z
2022-03-14T09:11:05.000Z
zgw_consumers/api_models/compat.py
maykinmedia/zgw-consumers
9b0759d9b7c3590b245004afd4c5e5474785bf91
[ "MIT" ]
2
2020-07-30T15:40:47.000Z
2020-11-30T10:56:29.000Z
try: from relativedeltafield.utils import parse_relativedelta except ImportError: # before 1.1.2 from relativedeltafield import parse_relativedelta # noqa
33
62
0.8
19
165
6.842105
0.684211
0.338462
0.369231
0
0
0
0
0
0
0
0
0.021583
0.157576
165
4
63
41.25
0.913669
0.10303
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.75
0
0.75
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
647c9b4279797820a4d46aea88f45c155ec64baa
129
py
Python
cabot/cabotapp/utils.py
boringusername99/cabot
56cfed43c006e145931f46cb68e316fbaccf75cd
[ "MIT" ]
3,865
2015-01-01T11:37:14.000Z
2022-03-30T01:02:50.000Z
cabot/cabotapp/utils.py
boringusername99/cabot
56cfed43c006e145931f46cb68e316fbaccf75cd
[ "MIT" ]
550
2015-01-02T18:06:08.000Z
2021-11-04T23:39:47.000Z
cabot/cabotapp/utils.py
boringusername99/cabot
56cfed43c006e145931f46cb68e316fbaccf75cd
[ "MIT" ]
598
2015-01-22T12:17:53.000Z
2022-03-25T17:32:21.000Z
from django.contrib.auth import get_user_model def cabot_needs_setup(): return not get_user_model().objects.all().exists()
21.5
54
0.775194
20
129
4.7
0.85
0.148936
0.255319
0
0
0
0
0
0
0
0
0
0.116279
129
5
55
25.8
0.824561
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
true
0
0.333333
0.333333
1
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
1
1
0
0
8
64810f006dbf492f8a5577c3f60c47b36d5e4ad0
5,664
py
Python
jmpMacDict.py
Nibble-Knowledge/macro-assembler
6c396625ef57edac0006a040ecce1c1eb534d488
[ "Unlicense" ]
null
null
null
jmpMacDict.py
Nibble-Knowledge/macro-assembler
6c396625ef57edac0006a040ecce1c1eb534d488
[ "Unlicense" ]
null
null
null
jmpMacDict.py
Nibble-Knowledge/macro-assembler
6c396625ef57edac0006a040ecce1c1eb534d488
[ "Unlicense" ]
null
null
null
#!/usr/bin/python #Note for future modifications: 2's complement compare flag #(the XOR of the sign and overflow flags) being 1 means that #the subtraction result was strictly less than zero jmpMac = dict() jmpMac["JMPEQ"] = """\ XOR $op1 $op2 INTO macro[2] LOD macro[2] JMP $dest""" jmpMac["JMPNE"] = """\ XOR $op1 $op2 INTO macro[2] LOD macro[2] LOGNOT ACC JMP $dest""" #inverse of JMPLE jmpMac["JMPG"] = """\ USTC ACC CMPC $op1 $op2 GETCMP ACC LOGNOT ACC JMP $dest""" #inverse of JMPGE jmpMac["JMPL"] = """\ USTC ACC CMPC $op2 $op1 GETCMP ACC LOGNOT ACC JMP $dest""" #Subtract op1 from op2 #if CMP is 0, result was 0 or greater #thus op1 was GE to op2 jmpMac["JMPGE"] = """\ USTC ACC CMPC $op2 $op1 GETCMP ACC JMP $dest""" #subtract op2 from op1 #if CMP is 0, result was 0 or greater #so thus op2 was less than or equal to op1 jmpMac["JMPLE"] = """\ USTC ACC CMPC $op1 $op2 GETCMP ACC JMP $dest""" #larger jump macros. jmpMac["JMPEQ8"] = """\ XOR8 $op1 $op2 INTO macro[2] OR macro[2] macro[3] INTO macro[1] LOD macro[1] JMP $dest""" jmpMac["JMPNE8"] = """\ XOR8 $op1 $op2 INTO macro[2] OR macro[3] macro[2] INTO macro[1] LOD macro[1] LOGNOT ACC JMP $dest""" jmpMac["JMPEQ16"] = """\ XOR16 $op1 $op2 INTO macro[2] OR macro[2] macro[5] INTO macro[2] OR macro[2] macro[4] INTO macro[2] OR macro[2] macro[3] INTO macro[2] LOD macro[2] JMP $dest""" jmpMac["JMPNE16"] = """\ XOR16 $op1 $op2 INTO macro[2] OR macro[2] macro[5] INTO macro[2] OR macro[2] macro[4] INTO macro[2] OR macro[2] macro[3] INTO macro[2] LOD macro[2] LOGNOT ACC JMP $dest""" jmpMac["JMPEQ32"] = """\ XOR32 $op1 $op2 INTO macro[2] OR macro[2] macro[9] INTO macro[2] OR macro[2] macro[8] INTO macro[2] OR macro[2] macro[7] INTO macro[2] OR macro[2] macro[6] INTO macro[2] OR macro[2] macro[5] INTO macro[2] OR macro[2] macro[4] INTO macro[2] OR macro[2] macro[3] INTO macro[2] LOD macro[2] JMP $dest""" jmpMac["JMPNE32"] = """\ XOR32 $op1 $op2 INTO macro[2] OR macro[2] macro[9] INTO macro[2] OR macro[2] macro[8] INTO macro[2] OR 
macro[2] macro[7] INTO macro[2] OR macro[2] macro[6] INTO macro[2] OR macro[2] macro[5] INTO macro[2] OR macro[2] macro[4] INTO macro[2] OR macro[2] macro[3] INTO macro[2] LOD macro[2] LOGNOT ACC JMP $dest""" jmpMac["JMPEQ64"] = """\ XOR64 $op1 $op2 INTO macro[2] OR macro[2] macro[11] INTO macro[2] OR macro[2] macro[10] INTO macro[2] OR macro[2] macro[F] INTO macro[2] OR macro[2] macro[E] INTO macro[2] OR macro[2] macro[D] INTO macro[2] OR macro[2] macro[C] INTO macro[2] OR macro[2] macro[B] INTO macro[2] OR macro[2] macro[A] INTO macro[2] OR macro[2] macro[9] INTO macro[2] OR macro[2] macro[8] INTO macro[2] OR macro[2] macro[7] INTO macro[2] OR macro[2] macro[6] INTO macro[2] OR macro[2] macro[5] INTO macro[2] OR macro[2] macro[4] INTO macro[2] OR macro[2] macro[3] INTO macro[2] LOD macro[2] JMP $dest""" jmpMac["JMPNE64"] = """\ XOR64 $op1 $op2 INTO macro[2] OR macro[2] macro[11] INTO macro[2] OR macro[2] macro[10] INTO macro[2] OR macro[2] macro[F] INTO macro[2] OR macro[2] macro[E] INTO macro[2] OR macro[2] macro[D] INTO macro[2] OR macro[2] macro[C] INTO macro[2] OR macro[2] macro[B] INTO macro[2] OR macro[2] macro[A] INTO macro[2] OR macro[2] macro[9] INTO macro[2] OR macro[2] macro[8] INTO macro[2] OR macro[2] macro[7] INTO macro[2] OR macro[2] macro[6] INTO macro[2] OR macro[2] macro[5] INTO macro[2] OR macro[2] macro[4] INTO macro[2] OR macro[2] macro[3] INTO macro[2] LOD macro[2] LOGNOT ACC JMP $dest""" jmpMac["JMPG8"] = """\ USTC ACC CMPC $op1[1] $op2[1] CMPC $op1[0] $op2[0] GETCMP ACC LOGNOT ACC JMP $dest""" jmpMac["JMPL8"] = """\ JMPG8 $op2 $op1 TO $dest """ jmpMac["JMPLE8"] = """\ USTC ACC CMPC $op1[1] $op2[1] CMPC $op1[0] $op2[0] GETCMP ACC JMP $dest""" jmpMac["JMPGE8"] = """\ JMPLE8 $op2 $op1 TO $dest""" jmpMac["JMPG16"] = """\ USTC ACC CMPC $op1[3] $op2[3] CMPC $op1[2] $op2[2] CMPC $op1[1] $op2[1] CMPC $op1[0] $op2[0] GETCMP ACC LOGNOT ACC JMP $dest """ jmpMac["JMPL16"] = """\ JMPG16 $op2 $op1 TO $dest """ jmpMac["JMPLE16"] = """\ USTC ACC CMPC 
$op1[3] $op2[3] CMPC $op1[2] $op2[2] CMPC $op1[1] $op2[1] CMPC $op1[0] $op2[0] GETCMP ACC JMP $dest""" jmpMac["JMPGE16"] = """\ JMPL16 $op2 $op1 TO $dest""" jmpMac["JMPL32"] = """\ JMPG32 $op2 $op1 TO $dest""" jmpMac["JMPG32"] = """\ USTC ACC CMPC $op1[7] $op2[7] CMPC $op1[6] $op2[6] CMPC $op1[5] $op2[5] CMPC $op1[4] $op2[4] CMPC $op1[3] $op2[3] CMPC $op1[2] $op2[2] CMPC $op1[1] $op2[1] CMPC $op1[0] $op2[0] GETCMP ACC LOGNOT ACC JMP $dest """ jmpMac["JMPLE32"] = """\ USTC ACC CMPC $op1[7] $op2[7] CMPC $op1[6] $op2[6] CMPC $op1[5] $op2[5] CMPC $op1[4] $op2[4] CMPC $op1[3] $op2[3] CMPC $op1[2] $op2[2] CMPC $op1[1] $op2[1] CMPC $op1[0] $op2[0] GETCMP ACC JMP $dest""" jmpMac["JMPGE32"] = """\ JMPLE32 $op2 $op1 TO $dest""" jmpMac["JMPL64"] = """\ JMPG64 $op2 $op1 TO $dest""" jmpMac["JMPG64"] = """\ USTC ACC CMPC $op1[F] $op2[F] CMPC $op1[E] $op2[E] CMPC $op1[D] $op2[D] CMPC $op1[C] $op2[C] CMPC $op1[B] $op2[B] CMPC $op1[A] $op2[A] CMPC $op1[9] $op2[9] CMPC $op1[8] $op2[8] CMPC $op1[7] $op2[7] CMPC $op1[6] $op2[6] CMPC $op1[5] $op2[5] CMPC $op1[4] $op2[4] CMPC $op1[3] $op2[3] CMPC $op1[2] $op2[2] CMPC $op1[1] $op2[1] CMPC $op1[0] $op2[0] GETCMP ACC LOGNOT ACC JMP $dest""" jmpMac["JMPLE64"] = """\ USTC ACC CMPC $op1[F] $op2[F] CMPC $op1[E] $op2[E] CMPC $op1[D] $op2[D] CMPC $op1[C] $op2[C] CMPC $op1[B] $op2[B] CMPC $op1[A] $op2[A] CMPC $op1[9] $op2[9] CMPC $op1[8] $op2[8] CMPC $op1[7] $op2[7] CMPC $op1[6] $op2[6] CMPC $op1[5] $op2[5] CMPC $op1[4] $op2[4] CMPC $op1[3] $op2[3] CMPC $op1[2] $op2[2] CMPC $op1[1] $op2[1] CMPC $op1[0] $op2[0] GETCMP ACC JMP $dest""" jmpMac["JMPGE64"] = """\ JMPLE64 $op2 $op1 TO $dest"""
19.735192
60
0.63577
1,117
5,664
3.223814
0.098478
0.199944
0.16662
0.173285
0.837823
0.796723
0.774785
0.745904
0.737295
0.706748
0
0.107248
0.157133
5,664
286
61
19.804196
0.647047
0.072564
0
0.826271
0
0
0.873067
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
b3b5cabfb46816ab80797bdaeafa1f7285229554
31,040
py
Python
sdk/python/pulumi_yandex/alb_load_balancer.py
pulumi/pulumi-yandex
559a0c82fd2b834bb5f1dc3abbf0dab689b13a3e
[ "ECL-2.0", "Apache-2.0" ]
9
2021-04-20T15:39:41.000Z
2022-02-20T09:14:39.000Z
sdk/python/pulumi_yandex/alb_load_balancer.py
pulumi/pulumi-yandex
559a0c82fd2b834bb5f1dc3abbf0dab689b13a3e
[ "ECL-2.0", "Apache-2.0" ]
56
2021-04-20T11:31:03.000Z
2022-03-31T15:53:06.000Z
sdk/python/pulumi_yandex/alb_load_balancer.py
pulumi/pulumi-yandex
559a0c82fd2b834bb5f1dc3abbf0dab689b13a3e
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from . import _utilities from . import outputs from ._inputs import * __all__ = ['AlbLoadBalancerArgs', 'AlbLoadBalancer'] @pulumi.input_type class AlbLoadBalancerArgs: def __init__(__self__, *, allocation_policy: pulumi.Input['AlbLoadBalancerAllocationPolicyArgs'], network_id: pulumi.Input[str], description: Optional[pulumi.Input[str]] = None, folder_id: Optional[pulumi.Input[str]] = None, labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, listeners: Optional[pulumi.Input[Sequence[pulumi.Input['AlbLoadBalancerListenerArgs']]]] = None, name: Optional[pulumi.Input[str]] = None, region_id: Optional[pulumi.Input[str]] = None, security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None): """ The set of arguments for constructing a AlbLoadBalancer resource. :param pulumi.Input['AlbLoadBalancerAllocationPolicyArgs'] allocation_policy: Allocation zones for the Load Balancer instance. The structure is documented below. :param pulumi.Input[str] network_id: ID of the network that the Load Balancer is located at. :param pulumi.Input[str] description: An optional description of the Load Balancer. :param pulumi.Input[str] folder_id: The ID of the folder to which the resource belongs. If omitted, the provider folder is used. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: Labels to assign to this Load Balancer. A list of key/value pairs. :param pulumi.Input[Sequence[pulumi.Input['AlbLoadBalancerListenerArgs']]] listeners: List of listeners for the Load Balancer. The structure is documented below. :param pulumi.Input[str] name: name of SNI match. 
:param pulumi.Input[str] region_id: ID of the region that the Load Balancer is located at. :param pulumi.Input[Sequence[pulumi.Input[str]]] security_group_ids: A list of ID's of security groups attached to the Load Balancer. """ pulumi.set(__self__, "allocation_policy", allocation_policy) pulumi.set(__self__, "network_id", network_id) if description is not None: pulumi.set(__self__, "description", description) if folder_id is not None: pulumi.set(__self__, "folder_id", folder_id) if labels is not None: pulumi.set(__self__, "labels", labels) if listeners is not None: pulumi.set(__self__, "listeners", listeners) if name is not None: pulumi.set(__self__, "name", name) if region_id is not None: pulumi.set(__self__, "region_id", region_id) if security_group_ids is not None: pulumi.set(__self__, "security_group_ids", security_group_ids) @property @pulumi.getter(name="allocationPolicy") def allocation_policy(self) -> pulumi.Input['AlbLoadBalancerAllocationPolicyArgs']: """ Allocation zones for the Load Balancer instance. The structure is documented below. """ return pulumi.get(self, "allocation_policy") @allocation_policy.setter def allocation_policy(self, value: pulumi.Input['AlbLoadBalancerAllocationPolicyArgs']): pulumi.set(self, "allocation_policy", value) @property @pulumi.getter(name="networkId") def network_id(self) -> pulumi.Input[str]: """ ID of the network that the Load Balancer is located at. """ return pulumi.get(self, "network_id") @network_id.setter def network_id(self, value: pulumi.Input[str]): pulumi.set(self, "network_id", value) @property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: """ An optional description of the Load Balancer. 
""" return pulumi.get(self, "description") @description.setter def description(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "description", value) @property @pulumi.getter(name="folderId") def folder_id(self) -> Optional[pulumi.Input[str]]: """ The ID of the folder to which the resource belongs. If omitted, the provider folder is used. """ return pulumi.get(self, "folder_id") @folder_id.setter def folder_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "folder_id", value) @property @pulumi.getter def labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]: """ Labels to assign to this Load Balancer. A list of key/value pairs. """ return pulumi.get(self, "labels") @labels.setter def labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]): pulumi.set(self, "labels", value) @property @pulumi.getter def listeners(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AlbLoadBalancerListenerArgs']]]]: """ List of listeners for the Load Balancer. The structure is documented below. """ return pulumi.get(self, "listeners") @listeners.setter def listeners(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AlbLoadBalancerListenerArgs']]]]): pulumi.set(self, "listeners", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ name of SNI match. """ return pulumi.get(self, "name") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) @property @pulumi.getter(name="regionId") def region_id(self) -> Optional[pulumi.Input[str]]: """ ID of the region that the Load Balancer is located at. """ return pulumi.get(self, "region_id") @region_id.setter def region_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "region_id", value) @property @pulumi.getter(name="securityGroupIds") def security_group_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]: """ A list of ID's of security groups attached to the Load Balancer. 
""" return pulumi.get(self, "security_group_ids") @security_group_ids.setter def security_group_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]): pulumi.set(self, "security_group_ids", value) @pulumi.input_type class _AlbLoadBalancerState: def __init__(__self__, *, allocation_policy: Optional[pulumi.Input['AlbLoadBalancerAllocationPolicyArgs']] = None, created_at: Optional[pulumi.Input[str]] = None, description: Optional[pulumi.Input[str]] = None, folder_id: Optional[pulumi.Input[str]] = None, labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, listeners: Optional[pulumi.Input[Sequence[pulumi.Input['AlbLoadBalancerListenerArgs']]]] = None, log_group_id: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, network_id: Optional[pulumi.Input[str]] = None, region_id: Optional[pulumi.Input[str]] = None, security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, status: Optional[pulumi.Input[str]] = None): """ Input properties used for looking up and filtering AlbLoadBalancer resources. :param pulumi.Input['AlbLoadBalancerAllocationPolicyArgs'] allocation_policy: Allocation zones for the Load Balancer instance. The structure is documented below. :param pulumi.Input[str] created_at: The Load Balancer creation timestamp. :param pulumi.Input[str] description: An optional description of the Load Balancer. :param pulumi.Input[str] folder_id: The ID of the folder to which the resource belongs. If omitted, the provider folder is used. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: Labels to assign to this Load Balancer. A list of key/value pairs. :param pulumi.Input[Sequence[pulumi.Input['AlbLoadBalancerListenerArgs']]] listeners: List of listeners for the Load Balancer. The structure is documented below. :param pulumi.Input[str] log_group_id: Cloud log group used by the Load Balancer to store access logs. :param pulumi.Input[str] name: name of SNI match. 
:param pulumi.Input[str] network_id: ID of the network that the Load Balancer is located at. :param pulumi.Input[str] region_id: ID of the region that the Load Balancer is located at. :param pulumi.Input[Sequence[pulumi.Input[str]]] security_group_ids: A list of ID's of security groups attached to the Load Balancer. :param pulumi.Input[str] status: Status of the Load Balancer. """ if allocation_policy is not None: pulumi.set(__self__, "allocation_policy", allocation_policy) if created_at is not None: pulumi.set(__self__, "created_at", created_at) if description is not None: pulumi.set(__self__, "description", description) if folder_id is not None: pulumi.set(__self__, "folder_id", folder_id) if labels is not None: pulumi.set(__self__, "labels", labels) if listeners is not None: pulumi.set(__self__, "listeners", listeners) if log_group_id is not None: pulumi.set(__self__, "log_group_id", log_group_id) if name is not None: pulumi.set(__self__, "name", name) if network_id is not None: pulumi.set(__self__, "network_id", network_id) if region_id is not None: pulumi.set(__self__, "region_id", region_id) if security_group_ids is not None: pulumi.set(__self__, "security_group_ids", security_group_ids) if status is not None: pulumi.set(__self__, "status", status) @property @pulumi.getter(name="allocationPolicy") def allocation_policy(self) -> Optional[pulumi.Input['AlbLoadBalancerAllocationPolicyArgs']]: """ Allocation zones for the Load Balancer instance. The structure is documented below. """ return pulumi.get(self, "allocation_policy") @allocation_policy.setter def allocation_policy(self, value: Optional[pulumi.Input['AlbLoadBalancerAllocationPolicyArgs']]): pulumi.set(self, "allocation_policy", value) @property @pulumi.getter(name="createdAt") def created_at(self) -> Optional[pulumi.Input[str]]: """ The Load Balancer creation timestamp. 
""" return pulumi.get(self, "created_at") @created_at.setter def created_at(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "created_at", value) @property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: """ An optional description of the Load Balancer. """ return pulumi.get(self, "description") @description.setter def description(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "description", value) @property @pulumi.getter(name="folderId") def folder_id(self) -> Optional[pulumi.Input[str]]: """ The ID of the folder to which the resource belongs. If omitted, the provider folder is used. """ return pulumi.get(self, "folder_id") @folder_id.setter def folder_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "folder_id", value) @property @pulumi.getter def labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]: """ Labels to assign to this Load Balancer. A list of key/value pairs. """ return pulumi.get(self, "labels") @labels.setter def labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]): pulumi.set(self, "labels", value) @property @pulumi.getter def listeners(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AlbLoadBalancerListenerArgs']]]]: """ List of listeners for the Load Balancer. The structure is documented below. """ return pulumi.get(self, "listeners") @listeners.setter def listeners(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AlbLoadBalancerListenerArgs']]]]): pulumi.set(self, "listeners", value) @property @pulumi.getter(name="logGroupId") def log_group_id(self) -> Optional[pulumi.Input[str]]: """ Cloud log group used by the Load Balancer to store access logs. """ return pulumi.get(self, "log_group_id") @log_group_id.setter def log_group_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "log_group_id", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ name of SNI match. 
""" return pulumi.get(self, "name") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) @property @pulumi.getter(name="networkId") def network_id(self) -> Optional[pulumi.Input[str]]: """ ID of the network that the Load Balancer is located at. """ return pulumi.get(self, "network_id") @network_id.setter def network_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "network_id", value) @property @pulumi.getter(name="regionId") def region_id(self) -> Optional[pulumi.Input[str]]: """ ID of the region that the Load Balancer is located at. """ return pulumi.get(self, "region_id") @region_id.setter def region_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "region_id", value) @property @pulumi.getter(name="securityGroupIds") def security_group_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]: """ A list of ID's of security groups attached to the Load Balancer. """ return pulumi.get(self, "security_group_ids") @security_group_ids.setter def security_group_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]): pulumi.set(self, "security_group_ids", value) @property @pulumi.getter def status(self) -> Optional[pulumi.Input[str]]: """ Status of the Load Balancer. 
""" return pulumi.get(self, "status") @status.setter def status(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "status", value) class AlbLoadBalancer(pulumi.CustomResource): @overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, allocation_policy: Optional[pulumi.Input[pulumi.InputType['AlbLoadBalancerAllocationPolicyArgs']]] = None, description: Optional[pulumi.Input[str]] = None, folder_id: Optional[pulumi.Input[str]] = None, labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, listeners: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AlbLoadBalancerListenerArgs']]]]] = None, name: Optional[pulumi.Input[str]] = None, network_id: Optional[pulumi.Input[str]] = None, region_id: Optional[pulumi.Input[str]] = None, security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, __props__=None): """ Creates an Application Load Balancer in the specified folder. For more information, see [the official documentation](https://cloud.yandex.com/en/docs/application-load-balancer/concepts/application-load-balancer) . 
## Example Usage ```python import pulumi import pulumi_yandex as yandex test_balancer = yandex.AlbLoadBalancer("test-balancer", network_id=yandex_vpc_network["test-network"]["id"], allocation_policy=yandex.AlbLoadBalancerAllocationPolicyArgs( locations=[yandex.AlbLoadBalancerAllocationPolicyLocationArgs( zone_id="ru-central1-a", subnet_id=yandex_vpc_subnet["test-subnet"]["id"], )], ), listeners=[yandex.AlbLoadBalancerListenerArgs( name="my-listener", endpoints=[yandex.AlbLoadBalancerListenerEndpointArgs( addresses=[yandex.AlbLoadBalancerListenerEndpointAddressArgs( external_ipv4_address=yandex.AlbLoadBalancerListenerEndpointAddressExternalIpv4AddressArgs(), )], ports=[8080], )], http=yandex.AlbLoadBalancerListenerHttpArgs( handler=yandex.AlbLoadBalancerListenerHttpHandlerArgs( http_router_id=yandex_alb_http_router["test-router"]["id"], ), ), )]) ``` ## Import An Application Load Balancer can be imported using the `id` of the resource, e.g. ```sh $ pulumi import yandex:index/albLoadBalancer:AlbLoadBalancer default load_balancer_id ``` :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[pulumi.InputType['AlbLoadBalancerAllocationPolicyArgs']] allocation_policy: Allocation zones for the Load Balancer instance. The structure is documented below. :param pulumi.Input[str] description: An optional description of the Load Balancer. :param pulumi.Input[str] folder_id: The ID of the folder to which the resource belongs. If omitted, the provider folder is used. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: Labels to assign to this Load Balancer. A list of key/value pairs. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AlbLoadBalancerListenerArgs']]]] listeners: List of listeners for the Load Balancer. The structure is documented below. :param pulumi.Input[str] name: name of SNI match. 
:param pulumi.Input[str] network_id: ID of the network that the Load Balancer is located at. :param pulumi.Input[str] region_id: ID of the region that the Load Balancer is located at. :param pulumi.Input[Sequence[pulumi.Input[str]]] security_group_ids: A list of ID's of security groups attached to the Load Balancer. """ ... @overload def __init__(__self__, resource_name: str, args: AlbLoadBalancerArgs, opts: Optional[pulumi.ResourceOptions] = None): """ Creates an Application Load Balancer in the specified folder. For more information, see [the official documentation](https://cloud.yandex.com/en/docs/application-load-balancer/concepts/application-load-balancer) . ## Example Usage ```python import pulumi import pulumi_yandex as yandex test_balancer = yandex.AlbLoadBalancer("test-balancer", network_id=yandex_vpc_network["test-network"]["id"], allocation_policy=yandex.AlbLoadBalancerAllocationPolicyArgs( locations=[yandex.AlbLoadBalancerAllocationPolicyLocationArgs( zone_id="ru-central1-a", subnet_id=yandex_vpc_subnet["test-subnet"]["id"], )], ), listeners=[yandex.AlbLoadBalancerListenerArgs( name="my-listener", endpoints=[yandex.AlbLoadBalancerListenerEndpointArgs( addresses=[yandex.AlbLoadBalancerListenerEndpointAddressArgs( external_ipv4_address=yandex.AlbLoadBalancerListenerEndpointAddressExternalIpv4AddressArgs(), )], ports=[8080], )], http=yandex.AlbLoadBalancerListenerHttpArgs( handler=yandex.AlbLoadBalancerListenerHttpHandlerArgs( http_router_id=yandex_alb_http_router["test-router"]["id"], ), ), )]) ``` ## Import An Application Load Balancer can be imported using the `id` of the resource, e.g. ```sh $ pulumi import yandex:index/albLoadBalancer:AlbLoadBalancer default load_balancer_id ``` :param str resource_name: The name of the resource. :param AlbLoadBalancerArgs args: The arguments to use to populate this resource's properties. :param pulumi.ResourceOptions opts: Options for the resource. """ ... 
def __init__(__self__, resource_name: str, *args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(AlbLoadBalancerArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, allocation_policy: Optional[pulumi.Input[pulumi.InputType['AlbLoadBalancerAllocationPolicyArgs']]] = None, description: Optional[pulumi.Input[str]] = None, folder_id: Optional[pulumi.Input[str]] = None, labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, listeners: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AlbLoadBalancerListenerArgs']]]]] = None, name: Optional[pulumi.Input[str]] = None, network_id: Optional[pulumi.Input[str]] = None, region_id: Optional[pulumi.Input[str]] = None, security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, __props__=None): if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version() if opts.id is None: if __props__ is not None: raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource') __props__ = AlbLoadBalancerArgs.__new__(AlbLoadBalancerArgs) if allocation_policy is None and not opts.urn: raise TypeError("Missing required property 'allocation_policy'") __props__.__dict__["allocation_policy"] = allocation_policy __props__.__dict__["description"] = description __props__.__dict__["folder_id"] = folder_id __props__.__dict__["labels"] = labels __props__.__dict__["listeners"] = listeners __props__.__dict__["name"] = name if network_id is None and not opts.urn: raise TypeError("Missing required property 
'network_id'") __props__.__dict__["network_id"] = network_id __props__.__dict__["region_id"] = region_id __props__.__dict__["security_group_ids"] = security_group_ids __props__.__dict__["created_at"] = None __props__.__dict__["log_group_id"] = None __props__.__dict__["status"] = None super(AlbLoadBalancer, __self__).__init__( 'yandex:index/albLoadBalancer:AlbLoadBalancer', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None, allocation_policy: Optional[pulumi.Input[pulumi.InputType['AlbLoadBalancerAllocationPolicyArgs']]] = None, created_at: Optional[pulumi.Input[str]] = None, description: Optional[pulumi.Input[str]] = None, folder_id: Optional[pulumi.Input[str]] = None, labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, listeners: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AlbLoadBalancerListenerArgs']]]]] = None, log_group_id: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, network_id: Optional[pulumi.Input[str]] = None, region_id: Optional[pulumi.Input[str]] = None, security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, status: Optional[pulumi.Input[str]] = None) -> 'AlbLoadBalancer': """ Get an existing AlbLoadBalancer resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[pulumi.InputType['AlbLoadBalancerAllocationPolicyArgs']] allocation_policy: Allocation zones for the Load Balancer instance. The structure is documented below. :param pulumi.Input[str] created_at: The Load Balancer creation timestamp. :param pulumi.Input[str] description: An optional description of the Load Balancer. 
:param pulumi.Input[str] folder_id: The ID of the folder to which the resource belongs. If omitted, the provider folder is used. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: Labels to assign to this Load Balancer. A list of key/value pairs. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AlbLoadBalancerListenerArgs']]]] listeners: List of listeners for the Load Balancer. The structure is documented below. :param pulumi.Input[str] log_group_id: Cloud log group used by the Load Balancer to store access logs. :param pulumi.Input[str] name: name of SNI match. :param pulumi.Input[str] network_id: ID of the network that the Load Balancer is located at. :param pulumi.Input[str] region_id: ID of the region that the Load Balancer is located at. :param pulumi.Input[Sequence[pulumi.Input[str]]] security_group_ids: A list of ID's of security groups attached to the Load Balancer. :param pulumi.Input[str] status: Status of the Load Balancer. """ opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = _AlbLoadBalancerState.__new__(_AlbLoadBalancerState) __props__.__dict__["allocation_policy"] = allocation_policy __props__.__dict__["created_at"] = created_at __props__.__dict__["description"] = description __props__.__dict__["folder_id"] = folder_id __props__.__dict__["labels"] = labels __props__.__dict__["listeners"] = listeners __props__.__dict__["log_group_id"] = log_group_id __props__.__dict__["name"] = name __props__.__dict__["network_id"] = network_id __props__.__dict__["region_id"] = region_id __props__.__dict__["security_group_ids"] = security_group_ids __props__.__dict__["status"] = status return AlbLoadBalancer(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name="allocationPolicy") def allocation_policy(self) -> pulumi.Output['outputs.AlbLoadBalancerAllocationPolicy']: """ Allocation zones for the Load Balancer instance. The structure is documented below. 
""" return pulumi.get(self, "allocation_policy") @property @pulumi.getter(name="createdAt") def created_at(self) -> pulumi.Output[str]: """ The Load Balancer creation timestamp. """ return pulumi.get(self, "created_at") @property @pulumi.getter def description(self) -> pulumi.Output[Optional[str]]: """ An optional description of the Load Balancer. """ return pulumi.get(self, "description") @property @pulumi.getter(name="folderId") def folder_id(self) -> pulumi.Output[str]: """ The ID of the folder to which the resource belongs. If omitted, the provider folder is used. """ return pulumi.get(self, "folder_id") @property @pulumi.getter def labels(self) -> pulumi.Output[Optional[Mapping[str, str]]]: """ Labels to assign to this Load Balancer. A list of key/value pairs. """ return pulumi.get(self, "labels") @property @pulumi.getter def listeners(self) -> pulumi.Output[Optional[Sequence['outputs.AlbLoadBalancerListener']]]: """ List of listeners for the Load Balancer. The structure is documented below. """ return pulumi.get(self, "listeners") @property @pulumi.getter(name="logGroupId") def log_group_id(self) -> pulumi.Output[str]: """ Cloud log group used by the Load Balancer to store access logs. """ return pulumi.get(self, "log_group_id") @property @pulumi.getter def name(self) -> pulumi.Output[str]: """ name of SNI match. """ return pulumi.get(self, "name") @property @pulumi.getter(name="networkId") def network_id(self) -> pulumi.Output[str]: """ ID of the network that the Load Balancer is located at. """ return pulumi.get(self, "network_id") @property @pulumi.getter(name="regionId") def region_id(self) -> pulumi.Output[Optional[str]]: """ ID of the region that the Load Balancer is located at. """ return pulumi.get(self, "region_id") @property @pulumi.getter(name="securityGroupIds") def security_group_ids(self) -> pulumi.Output[Optional[Sequence[str]]]: """ A list of ID's of security groups attached to the Load Balancer. 
""" return pulumi.get(self, "security_group_ids") @property @pulumi.getter def status(self) -> pulumi.Output[str]: """ Status of the Load Balancer. """ return pulumi.get(self, "status")
44.855491
187
0.647197
3,526
31,040
5.499149
0.064663
0.10098
0.080144
0.061269
0.890098
0.868644
0.850542
0.838473
0.827746
0.810882
0
0.000642
0.247713
31,040
691
188
44.920405
0.829729
0.339691
0
0.739946
1
0
0.116522
0.036001
0
0
0
0
0
1
0.163539
false
0.002681
0.018767
0
0.281501
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
b3c074b9b754f73fca9497d3bcb9b7d905b5ecf9
1,488
py
Python
full_stack_assignment/weather/views.py
ganeshkhutwad/kisanhub-api
7d479731cb0ea540669e91716504ea9ae8b629fa
[ "MIT" ]
null
null
null
full_stack_assignment/weather/views.py
ganeshkhutwad/kisanhub-api
7d479731cb0ea540669e91716504ea9ae8b629fa
[ "MIT" ]
null
null
null
full_stack_assignment/weather/views.py
ganeshkhutwad/kisanhub-api
7d479731cb0ea540669e91716504ea9ae8b629fa
[ "MIT" ]
null
null
null
import json
import os

from rest_framework.response import Response
from rest_framework.viewsets import ViewSet

# Directory holding the pre-scraped weather data files, relative to this module.
DATA_DIR = os.path.dirname(__file__) + '/../../data'


def _filter_weather_data(request, region):
    """Load the requested metric's data for *region* and return only the
    records inside the inclusive (start_year, start_month) ..
    (end_year, end_month) window.

    Required query params: ``metric``, ``start_month``, ``start_year``,
    ``end_month``, ``end_year``.  A ``KeyError`` propagates (as in the
    original views) when one is missing.

    :param request: DRF request carrying ``query_params``.
    :param region: data subdirectory name, e.g. ``'england'``.
    :return: list of record dicts (each with ``'year'`` and ``'month'`` keys).
    """
    metric = request.query_params['metric']
    start_month = int(request.query_params['start_month'])
    start_year = int(request.query_params['start_year'])
    end_month = int(request.query_params['end_month'])
    end_year = int(request.query_params['end_year'])

    with open(os.path.join(DATA_DIR, metric, region, 'data.json')) as f:
        data = json.load(f)

    # Compare (year, month) tuples so ranges spanning a year boundary
    # (e.g. 2019-11 .. 2020-02) work.  The previous check
    # (month in range(start, end + 1) and year in {start_year, end_year})
    # dropped every intermediate year and returned nothing when
    # start_month > end_month across a year boundary.
    start = (start_year, start_month)
    end = (end_year, end_month)
    return [x for x in data if start <= (x['year'], x['month']) <= end]


class EnglandWeatherView(ViewSet):
    """Read-only endpoint serving filtered weather records for England."""

    def list(self, request):
        return Response(_filter_weather_data(request, 'england'))


class ScotlandWeatherView(ViewSet):
    """Read-only endpoint serving filtered weather records for Scotland."""

    def list(self, request):
        return Response(_filter_weather_data(request, 'scotland'))
39.157895
147
0.668011
209
1,488
4.5311
0.229665
0.126716
0.190074
0.177402
0.804646
0.804646
0.804646
0.804646
0.804646
0.804646
0
0.001674
0.196909
1,488
37
148
40.216216
0.790795
0
0
0.666667
0
0
0.107527
0
0
0
0
0
0
1
0.074074
false
0
0.148148
0
0.37037
0
0
0
0
null
0
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
3754945622d2d115e4d402b3633514b1248495aa
191
py
Python
storagebox/__init__.py
peter-emil/StorageBox
eca61de7eab84935a91e87f50476efa12b898fb3
[ "MIT" ]
4
2021-02-28T02:07:46.000Z
2021-04-06T12:44:19.000Z
storagebox/__init__.py
peter-emil/StorageBox
eca61de7eab84935a91e87f50476efa12b898fb3
[ "MIT" ]
null
null
null
storagebox/__init__.py
peter-emil/StorageBox
eca61de7eab84935a91e87f50476efa12b898fb3
[ "MIT" ]
null
null
null
from storagebox.repository.deduplication import DeduplicationDynamoDbRepository from storagebox.repository.item_bank import ItemBankDynamoDbRepository from storagebox.api import Deduplicator
47.75
79
0.910995
18
191
9.611111
0.611111
0.242775
0.277457
0
0
0
0
0
0
0
0
0
0.062827
191
3
80
63.666667
0.96648
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
37b683fce88f3b27583aab8ae04ee9ca77ddca13
1,993
py
Python
simbots/simbots/utils/builtInEntities.py
vaibhavrr1/simbots
9cc432c39e125962b37494be50a6ecf77aba7f39
[ "MIT" ]
26
2021-11-23T04:55:32.000Z
2021-12-12T12:02:52.000Z
simbots/simbots/utils/builtInEntities.py
vaibhavrr1/simbots
9cc432c39e125962b37494be50a6ecf77aba7f39
[ "MIT" ]
null
null
null
simbots/simbots/utils/builtInEntities.py
vaibhavrr1/simbots
9cc432c39e125962b37494be50a6ecf77aba7f39
[ "MIT" ]
null
null
null
class EntitySamples():
    """Ready-made entity definitions for frequently used conversational
    entities (greetings, laughter, "cool", "bye").

    Each helper returns a mapping of entity-value name to a list of
    matcher specs understood by the entity extractor, each spec of the
    form ``{'tag': ..., 'pattern': <regex string>, 'type': 'regex'}``.
    All patterns are case-insensitive and anchored by surrounding
    whitespace (``\\s``) so they match whole words only.
    """

    @staticmethod
    def greetingsHelper():
        """Return regex matchers for common greeting words.

        :return: dict with keys ``'wsup'``, ``'hi'`` and ``'hello'``,
            each mapping to a single case-insensitive regex spec.
        """
        # Raw strings keep the regex text identical while avoiding
        # invalid-escape-sequence warnings for sequences like \s.
        return {
            'wsup': [{'tag': 'case-insensitive',
                      'pattern': r"\s[w]*[a]*[s]+[u]+[p]+\s",
                      'type': 'regex'}],
            'hi': [{'tag': 'case-insensitive',
                    'pattern': r"\s[h]+[i]+\s",
                    'type': 'regex'}],
            'hello': [{'tag': 'case-insensitive',
                       'pattern': r"\s[h]+[e]+[l]+[o]+\s",
                       'type': 'regex'}],
        }

    @staticmethod
    def laughterHelper():
        """Return regex matchers for laughter and a happy smiley.

        :return: dict with keys ``'haha'`` (elongated ha/he laughs) and
            ``'happysmily'`` (the ``:)`` emoticon).
        """
        return {
            'haha': [{'tag': 'case-insensitive',
                      'pattern': r"\s(h+(a|e)+)+(h+)?\s",
                      'type': 'regex'}],
            'happysmily': [{'tag': 'case-insensitive',
                            'pattern': r"\s\:\)\s",
                            'type': 'regex'}],
        }

    @staticmethod
    def coolHelper():
        """Return a regex matcher for elongated spellings of "cool"
        (e.g. ``cool``, ``coool``, ``ccooll``).

        :return: dict with the single key ``'cool'``.
        """
        return {'cool': [{'tag': 'case-insensitive',
                          'pattern': r"\sc+oo+l+\s",
                          'type': 'regex'}]}

    @staticmethod
    def byeHelper():
        """Return a regex matcher for farewells (``bye``, ``goodbye``,
        ``bubye`` and elongated variants).

        :return: dict with the single key ``'bye'``.  NOTE: the pattern
            includes the optional ``(b+u+)?`` prefix; the previous
            docstring omitted it and disagreed with the actual return.
        """
        return {'bye': [{'tag': 'case-insensitive',
                         'pattern': r"\s(goo+d)?(b+u+)?b+y+e+\s",
                         'type': 'regex'}]}
36.236364
269
0.419468
200
1,993
4.18
0.22
0.117225
0.301435
0.41866
0.854067
0.800239
0.770335
0.770335
0.770335
0.770335
0
0
0.289513
1,993
55
270
36.236364
0.590395
0.34571
0
0.25
0
0
0.341947
0.042205
0
0
0
0
0
1
0.166667
true
0
0
0
0.375
0
0
0
0
null
0
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
9
8075c83e4824daddaeaec1802e68a548a975d135
30,578
py
Python
insights/combiners/tests/test_grub_conf.py
skateman/insights-core
e7cd3001ffc2558757b9e7759dbe27b8b29f4bac
[ "Apache-2.0" ]
1
2021-11-08T16:25:01.000Z
2021-11-08T16:25:01.000Z
insights/combiners/tests/test_grub_conf.py
ahitacat/insights-core
0ba58dbe5edceef0bd4a74c1caf6b826381ccda5
[ "Apache-2.0" ]
null
null
null
insights/combiners/tests/test_grub_conf.py
ahitacat/insights-core
0ba58dbe5edceef0bd4a74c1caf6b826381ccda5
[ "Apache-2.0" ]
null
null
null
from insights.combiners.grub_conf import GrubConf, BootLoaderEntries from insights.parsers.grub_conf import (Grub1Config, Grub2Config, Grub2EFIConfig, Grub1EFIConfig, BootLoaderEntries as BLE) from insights.parsers.grubenv import GrubEnv from insights.parsers.ls_sys_firmware import LsSysFirmware from insights.parsers.installed_rpms import InstalledRpms from insights.parsers.cmdline import CmdLine from insights.tests import context_wrap import pytest GRUB1_TEMPLATE = """ # grub.conf generated by anaconda # # Note that you do not have to rerun grub after making changes to this file # NOTICE: You have a /boot partition. This means that # all kernel and initrd paths are relative to /boot/, eg. # root (hd0,0) # kernel /vmlinuz-version ro root=/dev/mapper/VolGroup-lv_root # initrd /initrd-[generic-]version.img #boot=/dev/sda default=0 timeout=5 splashimage=(hd0,0)/grub/splash.xpm.gz hiddenmenu title Red Hat Enterprise Linux 6 (2.6.32-642.el6.x86_64) root (hd0,0) kernel /vmlinuz-2.6.32-642.el6.x86_64 {kernel_boot_options} ro root=/dev/mapper/VolGroup-lv_root intel_iommu=off rd_NO_LUKS LANG=en_US.UTF-8 rd_NO_MD rd_LVM_LV=VolGroup/lv_swap SYSFONT=latarcyrheb-sun16 crashkernel=auto rd_LVM_LV=VolGroup/lv_root KEYBOARDTYPE=pc KEYTABLE=us rd_NO_DM rhgb quiet initrd /initramfs-2.6.32-642.el6.x86_64.img title Red Hat Enterprise Linux 6 (2.6.32-642.el6.x86_64-2) root (hd0,0) kernel /vmlinuz-2.6.32-642.el6.x86_64 {kernel_boot_options} ro root=/dev/mapper/VolGroup-lv_root intel_iommu=on rd_NO_LUKS LANG=en_US.UTF-8 rd_NO_MD rd_LVM_LV=VolGroup/lv_swap SYSFONT=latarcyrheb-sun16 crashkernel=auto rd_LVM_LV=VolGroup/lv_root KEYBOARDTYPE=pc KEYTABLE=us rd_NO_DM rhgb quiet initrd /initramfs-2.6.32-642.el6.x86_64.img title Red Hat Enterprise Linux 6 (2.6.32-642.el6.x86_64-2) root (hd0,0) kernel /vmlinuz-2.6.32-642.el6.x86_64 {kernel_boot_options} ro root=/dev/mapper/VolGroup-lv_root rd_NO_LUKS LANG=en_US.UTF-8 rd_NO_MD rd_LVM_LV=VolGroup/lv_swap SYSFONT=latarcyrheb-sun16 crashkernel=auto 
rd_LVM_LV=VolGroup/lv_root KEYBOARDTYPE=pc KEYTABLE=us rd_NO_DM rhgb quiet initrd /initramfs-2.6.32-642.el6.x86_64.img """.strip() # noqa # rhel-7 GRUB2_TEMPLATE = """ # # DO NOT EDIT THIS FILE # # It is automatically generated by grub2-mkconfig using templates # from /etc/grub.d and settings from /etc/default/grub # ### BEGIN /etc/grub.d/00_header ### set pager=1 terminal_output console ### END /etc/grub.d/00_header ### ### BEGIN /etc/grub.d/00_tuned ### set tuned_params="" ### END /etc/grub.d/00_tuned ### ### BEGIN /etc/grub.d/01_users ### ### END /etc/grub.d/01_users ### ### BEGIN /etc/grub.d/10_linux ### menuentry 'Red Hat Enterprise Linux Server (3.10.0-327.el7.x86_64) 7.2 (Maipo)' --class red --class gnu-linux --class gnu --class os --unrestricted $menuentry_id_option 'gnulinux-3.10.0-327.el7.x86_64-advanced-4f80b3d4-90ba-4545-869c-febdecc586ce' { load_video set gfxpayload=keep insmod gzio insmod part_msdos insmod xfs set root='hd0,msdos1' if [ x$feature_platform_search_hint = xy ]; then search --no-floppy --fs-uuid --set=root --hint-bios=hd0,msdos1 --hint-efi=hd0,msdos1 --hint-baremetal=ahci0,msdos1 --hint='hd0,msdos1' 860a7b56-dbdd-498a-b085-53dc93e4650b else search --no-floppy --fs-uuid --set=root 860a7b56-dbdd-498a-b085-53dc93e4650b fi linux16 /vmlinuz-3.10.0-327.el7.x86_64 %s root=/dev/mapper/rhel-root ro crashkernel=auto rd.lvm.lv=rhel/root rd.lvm.lv=rhel/swap rhgb quiet LANG=en_US.UTF-8 initrd16 /initramfs-3.10.0-327.el7.x86_64.img } menuentry 'Red Hat Enterprise Linux Server (0-rescue-9f20b35c9faa49aebe171f62a11b236f) 7.2 (Maipo)' --class red --class gnu-linux --class gnu --class os --unrestricted $menuentry_id_option 'gnulinux-0-rescue-9f20b35c9faa49aebe171f62a11b236f-advanced-4f80b3d4-90ba-4545-869c-febdecc586ce' { load_video insmod gzio insmod part_msdos insmod xfs set root='hd0,msdos1' if [ x$feature_platform_search_hint = xy ]; then search --no-floppy --fs-uuid --set=root --hint-bios=hd0,msdos1 --hint-efi=hd0,msdos1 --hint-baremetal=ahci0,msdos1 
--hint='hd0,msdos1' 860a7b56-dbdd-498a-b085-53dc93e4650b else search --no-floppy --fs-uuid --set=root 860a7b56-dbdd-498a-b085-53dc93e4650b fi linux16 /vmlinuz-0-rescue-9f20b35c9faa49aebe171f62a11b236f %s root=/dev/mapper/rhel-root ro crashkernel=auto rd.lvm.lv=rhel/root rd.lvm.lv=rhel/swap rhgb quiet initrd16 /initramfs-0-rescue-9f20b35c9faa49aebe171f62a11b236f.img } """.strip() # noqa GRUB2_TEMPLATE_BLSCFG = """ # # DO NOT EDIT THIS FILE # # It is automatically generated by grub2-mkconfig using templates # from /etc/grub.d and settings from /etc/default/grub # ### BEGIN /etc/grub.d/00_header ### set pager=1 terminal_output console ### END /etc/grub.d/00_header ### ### BEGIN /etc/grub.d/00_tuned ### set tuned_params="" ### END /etc/grub.d/00_tuned ### ### BEGIN /etc/grub.d/01_users ### ### END /etc/grub.d/01_users ### insmod blscfg blscfg if [ -s $prefix/grubenv ]; then load_env fi if [ -z "${kernelopts}" ]; then set kernelopts="root=/dev/mapper/rhel-root ro crashkernel=auto resume=/dev/mapper/rhel-swap rd.lvm.lv=rhel/root rd.lvm.lv=rhel/swap rhgb quiet transparent_hugepage=never " fi """.strip() # noqa GRUB2_TEMPLATE_NO_BLSCFG = """ # # DO NOT EDIT THIS FILE # # It is automatically generated by grub2-mkconfig using templates # from /etc/grub.d and settings from /etc/default/grub # ### BEGIN /etc/grub.d/00_header ### set pager=1 terminal_output console ### END /etc/grub.d/00_header ### ### BEGIN /etc/grub.d/00_tuned ### set tuned_params="" ### END /etc/grub.d/00_tuned ### ### BEGIN /etc/grub.d/01_users ### ### END /etc/grub.d/01_users ### ### BEGIN /etc/grub.d/10_linux ### menuentry 'Red Hat Enterprise Linux Server (4.18.0-240.el8.x86_64) 8.3 (Maipo)' --class red --class gnu-linux --class gnu --class os --unrestricted $menuentry_id_option 'gnulinux-3.10.0-327.el7.x86_64-advanced-4f80b3d4-90ba-4545-869c-febdecc586ce' { load_video set gfxpayload=keep insmod gzio insmod part_msdos insmod xfs set root='hd0,msdos1' if [ x$feature_platform_search_hint = xy ]; then search 
--no-floppy --fs-uuid --set=root --hint-bios=hd0,msdos1 --hint-efi=hd0,msdos1 --hint-baremetal=ahci0,msdos1 --hint='hd0,msdos1' 860a7b56-dbdd-498a-b085-53dc93e4650b else search --no-floppy --fs-uuid --set=root 860a7b56-dbdd-498a-b085-53dc93e4650b fi linux16 /vmlinuz-4.18.0-240.el8.x86_64 %s root=/dev/mapper/rhel-root ro crashkernel=auto rd.lvm.lv=rhel/root rd.lvm.lv=rhel/swap rhgb quiet LANG=en_US.UTF-8 initrd16 /initramfs-4.18.0-240.el8.x86_64.img } menuentry 'Red Hat Enterprise Linux Server (0-rescue-9f20b35c9faa49aebe171f62a11b236f) 8.3 (Maipo)' --class red --class gnu-linux --class gnu --class os --unrestricted $menuentry_id_option 'gnulinux-0-rescue-9f20b35c9faa49aebe171f62a11b236f-advanced-4f80b3d4-90ba-4545-869c-febdecc586ce' { load_video insmod gzio insmod part_msdos insmod xfs set root='hd0,msdos1' if [ x$feature_platform_search_hint = xy ]; then search --no-floppy --fs-uuid --set=root --hint-bios=hd0,msdos1 --hint-efi=hd0,msdos1 --hint-baremetal=ahci0,msdos1 --hint='hd0,msdos1' 860a7b56-dbdd-498a-b085-53dc93e4650b else search --no-floppy --fs-uuid --set=root 860a7b56-dbdd-498a-b085-53dc93e4650b fi linux16 /vmlinuz-0-rescue-9f20b35c9faa49aebe171f62a11b236f %s root=/dev/mapper/rhel-root ro crashkernel=auto rd.lvm.lv=rhel/root rd.lvm.lv=rhel/swap rhgb quiet initrd16 /initramfs-0-rescue-9f20b35c9faa49aebe171f62a11b236f.img } """.strip() # noqa GRUB2_EFI_CFG = """ ### BEGIN /etc/grub.d/10_linux ### menuentry 'Red Hat Enterprise Linux Server (3.10.0-514.16.1.el7.x86_64) 7.3 (Maipo)' --class red --class gnu-linux --class gnu --class os --unrestricted $menuentry_id_option 'gnulinux-3.10.0-514.el7.x86_64-advanced-9727cab4-12c2-41a8-9527-9644df34e586' { load_video set gfxpayload=keep insmod gzio insmod part_gpt insmod xfs set root='hd0,gpt2' if [ x$feature_platform_search_hint = xy ]; then search --no-floppy --fs-uuid --set=root --hint-bios=hd0,gpt2 --hint-efi=hd0,gpt2 --hint-baremetal=ahci0,gpt2 d80fa96c-ffa1-4894-9282-aeda37f0befe else search --no-floppy 
--fs-uuid --set=root d80fa96c-ffa1-4894-9282-aeda37f0befe fi linuxefi /vmlinuz-3.10.0-514.16.1.el7.x86_64 root=/dev/mapper/rhel-root ro rd.luks.uuid=luks-a40b320e-0711-4cd6-8f9e-ce32810e2a79 rd.lvm.lv=rhel/root rd.lvm.lv=rhel/swap rhgb quiet LANG=en_US.UTF-8 initrdefi /initramfs-3.10.0-514.16.1.el7.x86_64.img } menuentry 'Red Hat Enterprise Linux Server (3.10.0-514.10.2.el7.x86_64) 7.3 (Maipo)' --class red --class gnu-linux --class gnu --class os --unrestricted $menuentry_id_option 'gnulinux-3.10.0-514.el7.x86_64-advanced-9727cab4-12c2-41a8-9527-9644df34e586' { load_video set gfxpayload=keep insmod gzio insmod part_gpt insmod xfs set root='hd0,gpt2' if [ x$feature_platform_search_hint = xy ]; then search --no-floppy --fs-uuid --set=root --hint-bios=hd0,gpt2 --hint-efi=hd0,gpt2 --hint-baremetal=ahci0,gpt2 d80fa96c-ffa1-4894-9282-aeda37f0befe else search --no-floppy --fs-uuid --set=root d80fa96c-ffa1-4894-9282-aeda37f0befe fi linuxefi /vmlinuz-3.10.0-514.10.2.el7.x86_64 root=/dev/mapper/rhel-root ro rd.luks.uuid=luks-a40b320e-0711-4cd6-8f9e-ce32810e2a79 rd.lvm.lv=rhel/root rd.lvm.lv=rhel/swap rhgb quiet LANG=en_US.UTF-8 initrdefi /initramfs-3.10.0-514.10.2.el7.x86_64.img } menuentry 'Red Hat Enterprise Linux Server (3.10.0-514.el7.x86_64) 7.3 (Maipo)' --class red --class gnu-linux --class gnu --class os --unrestricted $menuentry_id_option 'gnulinux-3.10.0-514.el7.x86_64-advanced-9727cab4-12c2-41a8-9527-9644df34e586' { load_video set gfxpayload=keep insmod gzio insmod part_gpt insmod xfs set root='hd0,gpt2' if [ x$feature_platform_search_hint = xy ]; then search --no-floppy --fs-uuid --set=root --hint-bios=hd0,gpt2 --hint-efi=hd0,gpt2 --hint-baremetal=ahci0,gpt2 d80fa96c-ffa1-4894-9282-aeda37f0befe else search --no-floppy --fs-uuid --set=root d80fa96c-ffa1-4894-9282-aeda37f0befe fi linuxefi /vmlinuz-3.10.0-514.el7.x86_64 root=/dev/mapper/rhel-root ro rd.luks.uuid=luks-a40b320e-0711-4cd6-8f9e-ce32810e2a79 rd.lvm.lv=rhel/root rd.lvm.lv=rhel/swap rhgb quiet 
LANG=en_US.UTF-8 initrdefi /initramfs-3.10.0-514.el7.x86_64.img } menuentry 'Red Hat Enterprise Linux Server (0-rescue-f1340b5dd6ee4c26b587621566111421) 7.3 (Maipo)' --class red --class gnu-linux --class gnu --class os --unrestricted $menuentry_id_option 'gnulinux-0-rescue-f1340b5dd6ee4c26b587621566111421-advanced-9727cab4-12c2-41a8-9527-9644df34e586' { load_video insmod gzio insmod part_gpt insmod xfs set root='hd0,gpt2' if [ x$feature_platform_search_hint = xy ]; then search --no-floppy --fs-uuid --set=root --hint-bios=hd0,gpt2 --hint-efi=hd0,gpt2 --hint-baremetal=ahci0,gpt2 d80fa96c-ffa1-4894-9282-aeda37f0befe else search --no-floppy --fs-uuid --set=root d80fa96c-ffa1-4894-9282-aeda37f0befe fi linuxefi /vmlinuz-0-rescue-f1340b5dd6ee4c26b587621566111421 root=/dev/mapper/rhel-root ro rd.luks.uuid=luks-a40b320e-0711-4cd6-8f9e-ce32810e2a79 rd.lvm.lv=rhel/root rd.lvm.lv=rhel/swap rhgb quiet initrdefi /initramfs-0-rescue-f1340b5dd6ee4c26b587621566111421.img } ### END /etc/grub.d/10_linux ### """.strip() # noqa GRUB1_EFI_CFG = """ # grub.conf generated by anaconda # # Note that you do not have to rerun grub after making changes to this file # NOTICE: You have a /boot partition. This means that # all kernel and initrd paths are relative to /boot/, eg. # root (hd0,1) # kernel /vmlinuz-version ro root=/dev/mapper/VolGroup-lv_root # initrd /initrd-[generic-]version.img #boot=/dev/mpathap1 default=0 timeout=5 splashimage=(hd0,1)/grub/splash.xpm.gz hiddenmenu title Red Hat Enterprise Linux (2.6.32-71.el6.x86_64) root (hd0,1) kernel /vmlinuz-2.6.32-71.el6.x86_64 ro root=/dev/mapper/VolGroup-lv_root rd_LVM_LV=VolGroup/lv_root rd_LVM_LV=VolGroup/lv_swap rd_NO_LUKS rd_NO_MD rd_NO_DM LANG=en_US.UTF-8 SYSFONT=latarcyrheb-sun16 KEYBOARDTYPE=pc KEYTABLE=us crashkernel=auto rhgb quiet initrd /initramfs-2.6.32-71.el6.x86_64.img """.strip() # noqa SYS_FIRMWARE_DIR_NOEFI = """ /sys/firmware: total 0 drwxr-xr-x. 5 0 0 0 May 30 11:50 . dr-xr-xr-x. 13 0 0 0 May 30 11:50 .. drwxr-xr-x. 
5 0 0 0 May 30 11:50 acpi drwxr-xr-x. 3 0 0 0 May 30 11:51 dmi drwxr-xr-x. 7 0 0 0 May 30 12:31 memmap /sys/firmware/acpi: total 0 drwxr-xr-x. 5 0 0 0 May 30 11:50 . drwxr-xr-x. 5 0 0 0 May 30 11:50 .. drwxr-xr-x. 6 0 0 0 May 30 12:31 hotplug drwxr-xr-x. 2 0 0 0 May 30 12:31 interrupts -r--r--r--. 1 0 0 4096 May 30 12:31 pm_profile drwxr-xr-x. 3 0 0 0 May 30 11:50 tables """.strip() SYS_FIRMWARE_DIR_EFI = """ /sys/firmware: total 0 drwxr-xr-x. 5 0 0 0 May 30 11:50 . dr-xr-xr-x. 13 0 0 0 May 30 11:50 .. drwxr-xr-x. 5 0 0 0 May 30 11:50 acpi drwxr-xr-x. 3 0 0 0 May 30 11:51 dmi drwxr-xr-x. 7 0 0 0 May 30 12:31 memmap /sys/firmware/efi: total 0 drwxr-xr-x. 5 0 0 0 May 30 11:50 . drwxr-xr-x. 5 0 0 0 May 30 11:50 .. """.strip() INSTALLED_RPMS_V1 = """ grub-0.97-94.el6.x86_64 Mon Jan 8 18:35:25 2018 libreport-compat-2.0.9-24.el6.x86_64 Mon Jan 8 18:32:59 2018 make-3.81-20.el6.x86_64 Mon Jan 8 18:31:49 2018 """.strip() INSTALLED_RPMS_V2 = """ grub2-2.02-0.44.el7.x86_64 Wed May 10 14:10:30 2017 libwbclient-4.4.4-12.el7_3.x86_64 Wed May 10 14:08:10 2017 xorg-x11-drv-vmmouse-13.0.0-12.el7.x86_64 Wed May 10 14:10:36 2017 """.strip() CMDLINE_V1 = """ ro root=/dev/mapper/vg_rhel6box-lv_root rd_NO_LUKS LANG=en_US.UTF-8 rd_LVM_LV=vg_rhel6box/lv_swap rd_LVM_LV=vg_rhel6box/lv_root rd_NO_MD SYSFONT=latarcyrheb-sun16 crashkernel=129M@0M KEYBOAR DTYPE=pc KEYTABLE=us rd_NO_DM rhgb quiet """.strip() # noqa CMDLINE_V2 = """ BOOT_IMAGE=/vmlinuz-3.10.0-514.10.2.el7.x86_64 root=/dev/mapper/vg_system-lv_root ro crashkernel=auto rd.lvm.lv=vg_system/lv_root rd.lvm.lv=vg_system/lv_swap rhgb quiet LANG=en_US.UTF-8 """.strip() # noqa BOOT_LOADER_ENTRIES_1 = """ title Red Hat Enterprise Linux (4.18.0-80.1.2.el8_0.x86_64) 8.0 (Ootpa) version 4.18.0-80.1.2.el8_0.x86_64 linux /vmlinuz-4.18.0-80.1.2.el8_0.x86_64 initrd /initramfs-4.18.0-80.1.2.el8_0.x86_64.img $tuned_initrd options root=/dev/mapper/rhel_vm37--146-root ro crashkernel=auto resume=/dev/mapper/rhel_vm37--146-swap 
rd.lvm.lv=rhel_vm37-146/root rd.lvm.lv=rhel_vm37-146/swap $tuned_params noapic id rhel-20190428101407-4.18.0-80.1.2.el8_0.x86_64 grub_users $grub_users grub_arg --unrestricted grub_class kernel """.strip() # noqa BOOT_LOADER_ENTRIES_2 = """ title Red Hat Enterprise Linux (4.18.0-32.el8.x86_64) 8.0 (Ootpa) version 4.18.0-32.el8.x86_64 linux /vmlinuz-4.18.0-32.el8.x86_64 initrd /initramfs-4.18.0-32.el8.x86_64.img options root=/dev/mapper/rhel_rhel8-root ro elevator=noop no_timer_check crashkernel=auto resume=/dev/mapper/rhel_rhel8-swap rd.lvm.lv=rhel_rhel8/root rd.lvm.lv=rhel_rhel8/swap biosdevname=0 net.ifnames=0 rhgb id rhel-20181027203430-4.18.0-32.el8.x86_64 grub_users $grub_users grub_arg --unrestricted grub_class kernel """.strip() # noqa BOOT_LOADER_ENTRIES_3 = """ title Red Hat Enterprise Linux (4.18.0-305.el8.x86_64) 8.4 (Ootpa) version 4.18.0-305.el8.x86_64 linux /vmlinuz-4.18.0-305.el8.x86_64 initrd /initramfs-4.18.0-305.el8.x86_64.img $tuned_initrd options $kernelopts $tuned_params id rhel-20210429130346-4.18.0-305.el8.x86_64 grub_users $grub_users grub_arg --unrestricted grub_class kernel """.strip() GRUBENV_WITH_TUNED_PARAMS = """ # GRUB Environment Block saved_entry=295e1ba1696e4fad9e062f096f92d147-4.18.0-305.el8.x86_64 kernelopts=root=/dev/mapper/root_vg-lv_root ro crashkernel=auto resume=/dev/mapper/root_vg-lv_swap rd.lvm.lv=root_vg/lv_root rd.lvm.lv=root_vg/lv_swap console=tty0 console=ttyS0,115200 noapic boot_success=0 boot_indeterminate=2 tuned_params=transparent_hugepages=never tuned_initrd= 
############################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################### """.strip() # noqa def test_grub1_only1(): grub1 = Grub1Config(context_wrap(GRUB1_TEMPLATE)) cmdline = CmdLine(context_wrap(CMDLINE_V1)) result = GrubConf(grub1, None, None, None, None, None, cmdline, None) assert result.kernel_initrds['grub_kernels'][0] == 'vmlinuz-2.6.32-642.el6.x86_64' assert result.kernel_initrds['grub_initrds'][0] == 'initramfs-2.6.32-642.el6.x86_64.img' assert result.is_kdump_iommu_enabled is True assert result.get_grub_cmdlines() == result.get_grub_cmdlines('/vmlinuz') assert len(result.get_grub_cmdlines()) == 3 assert result.version == 1 assert result.is_efi is False def test_grub1_cmdline(): grub1 = Grub1Config(context_wrap(GRUB1_TEMPLATE)) grub2 = Grub2Config(context_wrap(GRUB2_TEMPLATE)) grub1e = Grub1EFIConfig(context_wrap(GRUB1_EFI_CFG)) grub2e = Grub2EFIConfig(context_wrap(GRUB2_EFI_CFG)) cmdline = CmdLine(context_wrap(CMDLINE_V1)) sys_firmware = LsSysFirmware(context_wrap(SYS_FIRMWARE_DIR_NOEFI)) result = GrubConf(grub1, grub2, grub1e, grub2e, None, None, cmdline, sys_firmware) assert result.kernel_initrds['grub_kernels'][0] == 'vmlinuz-2.6.32-642.el6.x86_64' assert result.kernel_initrds['grub_initrds'][0] == 'initramfs-2.6.32-642.el6.x86_64.img' assert result.is_kdump_iommu_enabled is True assert result.get_grub_cmdlines() == result.get_grub_cmdlines('/vmlinuz') assert 
len(result.get_grub_cmdlines()) == 3 assert result.version == 1 assert result.is_efi is False def test_grub1_efi_cmdline(): grub1 = Grub1Config(context_wrap(GRUB1_TEMPLATE)) grub2 = Grub2Config(context_wrap(GRUB2_TEMPLATE)) grub1e = Grub1EFIConfig(context_wrap(GRUB1_EFI_CFG)) grub2e = Grub2EFIConfig(context_wrap(GRUB2_EFI_CFG)) cmdline = CmdLine(context_wrap(CMDLINE_V1)) sys_firmware = LsSysFirmware(context_wrap(SYS_FIRMWARE_DIR_EFI)) result = GrubConf(grub1, grub2, grub1e, grub2e, None, None, cmdline, sys_firmware) assert result.kernel_initrds['grub_kernels'][0] == 'vmlinuz-2.6.32-71.el6.x86_64' assert result.kernel_initrds['grub_initrds'][0] == 'initramfs-2.6.32-71.el6.x86_64.img' assert result.is_kdump_iommu_enabled is False assert len(result.get_grub_cmdlines()) == 1 assert result.version == 1 assert result.is_efi is True def test_grub1_rpms(): grub1 = Grub1Config(context_wrap(GRUB1_TEMPLATE)) grub2 = Grub2Config(context_wrap(GRUB2_TEMPLATE)) grub1e = Grub1EFIConfig(context_wrap(GRUB1_EFI_CFG)) grub2e = Grub2EFIConfig(context_wrap(GRUB2_EFI_CFG)) rpms = InstalledRpms(context_wrap(INSTALLED_RPMS_V1)) cmdline = CmdLine(context_wrap(CMDLINE_V2)) sys_firmware = LsSysFirmware(context_wrap(SYS_FIRMWARE_DIR_NOEFI)) result = GrubConf(grub1, grub2, grub1e, grub2e, None, rpms, cmdline, sys_firmware) assert result.kernel_initrds['grub_kernels'][0] == 'vmlinuz-2.6.32-642.el6.x86_64' assert result.kernel_initrds['grub_initrds'][0] == 'initramfs-2.6.32-642.el6.x86_64.img' assert result.is_kdump_iommu_enabled is True assert result.get_grub_cmdlines() == result.get_grub_cmdlines('/vmlinuz') assert len(result.get_grub_cmdlines()) == 3 assert result.version == 1 assert result.is_efi is False def test_grub1_efi_rpms(): grub1 = Grub1Config(context_wrap(GRUB1_TEMPLATE)) grub2 = Grub2Config(context_wrap(GRUB2_TEMPLATE)) grub1e = Grub1EFIConfig(context_wrap(GRUB1_EFI_CFG)) grub2e = Grub2EFIConfig(context_wrap(GRUB2_EFI_CFG)) rpms = InstalledRpms(context_wrap(INSTALLED_RPMS_V1)) 
cmdline = CmdLine(context_wrap(CMDLINE_V2)) sys_firmware = LsSysFirmware(context_wrap(SYS_FIRMWARE_DIR_EFI)) result = GrubConf(grub1, grub2, grub1e, grub2e, None, rpms, cmdline, sys_firmware) assert result.kernel_initrds['grub_kernels'][0] == 'vmlinuz-2.6.32-71.el6.x86_64' assert result.kernel_initrds['grub_initrds'][0] == 'initramfs-2.6.32-71.el6.x86_64.img' assert result.is_kdump_iommu_enabled is False assert result.get_grub_cmdlines() == result.get_grub_cmdlines('/vmlinuz') assert len(result.get_grub_cmdlines()) == 1 assert result.version == 1 assert result.is_efi is True def test_grub2_cmdline(): grub1 = Grub1Config(context_wrap(GRUB1_TEMPLATE)) grub2 = Grub2Config(context_wrap(GRUB2_TEMPLATE)) grub1e = Grub1EFIConfig(context_wrap(GRUB1_EFI_CFG)) grub2e = Grub2EFIConfig(context_wrap(GRUB2_EFI_CFG)) cmdline = CmdLine(context_wrap(CMDLINE_V2)) sys_firmware = LsSysFirmware(context_wrap(SYS_FIRMWARE_DIR_NOEFI)) result = GrubConf(grub1, grub2, grub1e, grub2e, None, None, cmdline, sys_firmware) assert result.kernel_initrds['grub_kernels'][0] == 'vmlinuz-3.10.0-327.el7.x86_64' assert result.kernel_initrds['grub_initrds'][0] == 'initramfs-3.10.0-327.el7.x86_64.img' assert result.is_kdump_iommu_enabled is False assert result.get_grub_cmdlines('/vmlinuz-3.10.0')[0].name == "'Red Hat Enterprise Linux Server (3.10.0-327.el7.x86_64) 7.2 (Maipo)' --class red --class gnu-linux --class gnu --class os --unrestricted $menuentry_id_option 'gnulinux-3.10.0-327.el7.x86_64-advanced-4f80b3d4-90ba-4545-869c-febdecc586ce'" # noqa assert result.get_grub_cmdlines('test') == [] assert result.get_grub_cmdlines('') == [] assert len(result.get_grub_cmdlines()) == 2 assert result.version == 2 assert result.is_efi is False def test_grub2_efi_cmdline(): grub1 = Grub1Config(context_wrap(GRUB1_TEMPLATE)) grub2 = Grub2Config(context_wrap(GRUB2_TEMPLATE)) grub1e = Grub1EFIConfig(context_wrap(GRUB1_EFI_CFG)) grub2e = Grub2EFIConfig(context_wrap(GRUB2_EFI_CFG)) cmdline = 
CmdLine(context_wrap(CMDLINE_V2)) sys_firmware = LsSysFirmware(context_wrap(SYS_FIRMWARE_DIR_EFI)) result = GrubConf(grub1, grub2, grub1e, grub2e, None, None, cmdline, sys_firmware) assert result.get_grub_cmdlines() == result.get_grub_cmdlines('/vmlinuz') assert result.get_grub_cmdlines('rescue')[0].name.startswith("'Red Hat Enterprise Linux Server (0-rescue") assert len(result.get_grub_cmdlines()) == 4 assert result.version == 2 assert result.is_efi is True def test_grub2_rpms(): grub1 = Grub1Config(context_wrap(GRUB1_TEMPLATE)) grub2 = Grub2Config(context_wrap(GRUB2_TEMPLATE)) grub1e = Grub1EFIConfig(context_wrap(GRUB1_EFI_CFG)) grub2e = Grub2EFIConfig(context_wrap(GRUB2_EFI_CFG)) rpms = InstalledRpms(context_wrap(INSTALLED_RPMS_V2)) cmdline = CmdLine(context_wrap(CMDLINE_V1)) result = GrubConf(grub1, grub2, grub1e, grub2e, None, rpms, cmdline, None) assert result.kernel_initrds['grub_kernels'][0] == 'vmlinuz-3.10.0-327.el7.x86_64' assert result.kernel_initrds['grub_initrds'][0] == 'initramfs-3.10.0-327.el7.x86_64.img' assert result.is_kdump_iommu_enabled is False assert result.get_grub_cmdlines('/vmlinuz-3.10.0')[0].name == "'Red Hat Enterprise Linux Server (3.10.0-327.el7.x86_64) 7.2 (Maipo)' --class red --class gnu-linux --class gnu --class os --unrestricted $menuentry_id_option 'gnulinux-3.10.0-327.el7.x86_64-advanced-4f80b3d4-90ba-4545-869c-febdecc586ce'" # noqa assert result.get_grub_cmdlines('test') == [] assert result.get_grub_cmdlines('') == [] assert len(result.get_grub_cmdlines()) == 2 assert result.version == 2 assert result.is_efi is False def test_grub2_efi_rpms(): grub1 = Grub1Config(context_wrap(GRUB1_TEMPLATE)) grub2 = Grub2Config(context_wrap(GRUB2_TEMPLATE)) grub1e = Grub1EFIConfig(context_wrap(GRUB1_EFI_CFG)) grub2e = Grub2EFIConfig(context_wrap(GRUB2_EFI_CFG)) rpms = InstalledRpms(context_wrap(INSTALLED_RPMS_V2)) cmdline = CmdLine(context_wrap(CMDLINE_V1)) sys_firmware = LsSysFirmware(context_wrap(SYS_FIRMWARE_DIR_EFI)) result = 
GrubConf(grub1, grub2, grub1e, grub2e, None, rpms, cmdline, sys_firmware) assert result.kernel_initrds['grub_initrds'][0] == 'initramfs-3.10.0-514.16.1.el7.x86_64.img' assert result.get_grub_cmdlines() == result.get_grub_cmdlines('/vmlinuz') assert result.get_grub_cmdlines('rescue')[0].name.startswith("'Red Hat Enterprise Linux Server (0-rescue") assert len(result.get_grub_cmdlines()) == 4 assert result.version == 2 assert result.is_efi is True def test_get_grub_cmdlines_none(): grub1 = Grub1Config(context_wrap(GRUB1_TEMPLATE)) grub2 = Grub2Config(context_wrap(GRUB2_TEMPLATE)) cmdline = CmdLine(context_wrap(CMDLINE_V2)) sys_firmware = LsSysFirmware(context_wrap(SYS_FIRMWARE_DIR_EFI)) with pytest.raises(Exception) as pe: GrubConf(grub1, grub2, None, None, None, None, cmdline, sys_firmware) assert "No valid grub configuration is found." in str(pe.value) grub1e = Grub1EFIConfig(context_wrap(GRUB1_TEMPLATE)) grub2e = Grub2EFIConfig(context_wrap(GRUB2_TEMPLATE)) rpms = InstalledRpms(context_wrap(INSTALLED_RPMS_V2)) with pytest.raises(Exception) as pe: GrubConf(None, None, grub1e, grub2e, None, rpms, None, None) assert "No valid grub configuration is found." in str(pe.value) grub2e = Grub2EFIConfig(context_wrap(GRUB2_EFI_CFG)) with pytest.raises(Exception) as pe: GrubConf(grub1, None, grub1e, grub2e, None, rpms, None, None) assert "No valid grub configuration is found." 
in str(pe.value) def test_grub2_grubenv(): grubenv = GrubEnv(context_wrap(GRUBENV_WITH_TUNED_PARAMS)) grub2 = Grub2Config(context_wrap(GRUB2_TEMPLATE)) grub_ble1 = BLE(context_wrap(BOOT_LOADER_ENTRIES_1)) grub_ble2 = BLE(context_wrap(BOOT_LOADER_ENTRIES_2)) grub_bles = BootLoaderEntries([grub_ble1, grub_ble2], grubenv, None) rpms = InstalledRpms(context_wrap(INSTALLED_RPMS_V2)) sys_firmware = LsSysFirmware(context_wrap(SYS_FIRMWARE_DIR_NOEFI)) result = GrubConf(None, grub2, None, None, grub_bles, rpms, None, sys_firmware) assert len(result.get_grub_cmdlines()) == 2 assert 'noapic' not in result.get_grub_cmdlines()[1]['cmdline'] assert 'transparent_hugepages' not in result.get_grub_cmdlines()[0]['cmdline'] assert result.version == 2 assert not result.is_efi def test_grub2_grubenv_with_kernelopts(): grubenv = GrubEnv(context_wrap(GRUBENV_WITH_TUNED_PARAMS)) grub2 = Grub2Config(context_wrap(GRUB2_TEMPLATE_BLSCFG)) grub_ble1 = BLE(context_wrap(BOOT_LOADER_ENTRIES_1)) grub_ble2 = BLE(context_wrap(BOOT_LOADER_ENTRIES_2)) grub_ble3 = BLE(context_wrap(BOOT_LOADER_ENTRIES_3)) grub_bles = BootLoaderEntries([grub_ble1, grub_ble2, grub_ble3], grubenv, None) rpms = InstalledRpms(context_wrap(INSTALLED_RPMS_V2)) sys_firmware = LsSysFirmware(context_wrap(SYS_FIRMWARE_DIR_NOEFI)) result = GrubConf(None, grub2, None, None, grub_bles, rpms, None, sys_firmware) assert len(result.get_grub_cmdlines()) == 3 assert 'noapic' in result.get_grub_cmdlines()[2]['cmdline'] assert 'transparent_hugepages' in result.get_grub_cmdlines()[2]['cmdline'] assert result.version == 2 assert not result.is_efi def test_grub2_with_blscfg(): grub2 = Grub2Config(context_wrap(GRUB2_TEMPLATE_BLSCFG)) grub_ble1 = BLE(context_wrap(BOOT_LOADER_ENTRIES_1)) grub_ble2 = BLE(context_wrap(BOOT_LOADER_ENTRIES_2)) grub_ble3 = BLE(context_wrap(BOOT_LOADER_ENTRIES_3)) grub_bles = BootLoaderEntries([grub_ble1, grub_ble2, grub_ble3], None, None) rpms = InstalledRpms(context_wrap(INSTALLED_RPMS_V2)) sys_firmware = 
LsSysFirmware(context_wrap(SYS_FIRMWARE_DIR_NOEFI)) result = GrubConf(None, grub2, None, None, grub_bles, rpms, None, sys_firmware) assert len(result.get_grub_cmdlines()) == 3 assert 'noapic' in result.get_grub_cmdlines()[0]['cmdline'] assert 'transparent_hugepages' not in result.get_grub_cmdlines()[0]['cmdline'] assert result.version == 2 assert not result.is_efi def test_grub2_boot_loader_entries(): grub2 = Grub2Config(context_wrap(GRUB2_TEMPLATE_BLSCFG)) grub_ble1 = BLE(context_wrap(BOOT_LOADER_ENTRIES_1)) grub_ble2 = BLE(context_wrap(BOOT_LOADER_ENTRIES_2)) grub_bles = BootLoaderEntries([grub_ble1, grub_ble2], None, None) rpms = InstalledRpms(context_wrap(INSTALLED_RPMS_V2)) sys_firmware = LsSysFirmware(context_wrap(SYS_FIRMWARE_DIR_NOEFI)) result = GrubConf(None, grub2, None, None, grub_bles, rpms, None, sys_firmware) assert len(result.get_grub_cmdlines()) == 2 assert 'noapic' in result.get_grub_cmdlines()[0]['cmdline'] assert result.version == 2 assert not result.is_efi def test_grub2_boot_loader_entries_with_grubenv(): grubenv = GrubEnv(context_wrap(GRUBENV_WITH_TUNED_PARAMS)) grub2 = Grub2Config(context_wrap(GRUB2_TEMPLATE_BLSCFG)) grub_ble1 = BLE(context_wrap(BOOT_LOADER_ENTRIES_1)) grub_ble3 = BLE(context_wrap(BOOT_LOADER_ENTRIES_3)) grub_bles = BootLoaderEntries([grub_ble1, grub_ble3], grubenv, None) rpms = InstalledRpms(context_wrap(INSTALLED_RPMS_V2)) sys_firmware = LsSysFirmware(context_wrap(SYS_FIRMWARE_DIR_NOEFI)) result = GrubConf(None, grub2, None, None, grub_bles, rpms, None, sys_firmware) assert len(result.get_grub_cmdlines()) == 2 assert 'noapic' in result.get_grub_cmdlines()[0]['cmdline'] assert 'transparent_hugepages' in result.get_grub_cmdlines()[0]['cmdline'] assert 'noapic' in result.get_grub_cmdlines()[1]['cmdline'] assert 'transparent_hugepages' in result.get_grub_cmdlines()[1]['cmdline'] assert result.version == 2 assert not result.is_efi
49.399031
655
0.71679
4,697
30,578
4.478603
0.078774
0.048108
0.032801
0.044923
0.915716
0.900171
0.881679
0.86081
0.831099
0.817266
0
0.100084
0.13997
30,578
618
656
49.478964
0.699825
0.002322
0
0.710383
0
0.102004
0.578578
0.240489
0
0
0
0
0.167577
1
0.027322
false
0
0.014572
0
0.041894
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
809f81ad42b734bc6c56c050639f3dd17faf53ee
22,281
py
Python
Scraping/Scraping_YouTube/youtube_driver/__init__.py
ghassen1302/Interview_Code_Demonstration
fd9e2b313d3203e79e4f40bd52f82365508126d2
[ "Apache-2.0" ]
null
null
null
Scraping/Scraping_YouTube/youtube_driver/__init__.py
ghassen1302/Interview_Code_Demonstration
fd9e2b313d3203e79e4f40bd52f82365508126d2
[ "Apache-2.0" ]
null
null
null
Scraping/Scraping_YouTube/youtube_driver/__init__.py
ghassen1302/Interview_Code_Demonstration
fd9e2b313d3203e79e4f40bd52f82365508126d2
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python
# coding: utf-8
"""Social-media scraping driver abstraction and its YouTube implementation.

``YoutubeDriver`` talks to the YouTube Data API v3 through clients built by
``GetAuthenticated``, which reads one of several credential files.  Whenever a
request fails (typically because the current API key hit its daily quota) the
driver rotates to the next credential file and retries, until every file has
been tried.
"""

from .get_authenticated import GetAuthenticated
from .get_comments import GetComments
#from .get_images import GetImages
from abc import ABC
import googleapiclient.discovery  # pip install google-api-python-client | pip install protobuf==3.12.2 | pip install --upgrade protobuf --force-reinstall
import googleapiclient.errors
from google.auth.transport.requests import Request  # pip install google
import time
import sys
import json

__version__ = '1.0.0'


class Driver(ABC):
    """Abstract interface that every platform-specific driver implements.

    Every method is a no-op here; concrete drivers override the subset of
    operations their platform supports.
    """

    def get_event(self, user_id, verbose=False):
        pass

    def get_page(self, user_id, verbose=False):
        pass

    def get_group(self, user_id, verbose=False):
        pass

    def get_images(self, user_id, verbose=False):
        pass

    def get_user_info(self, user_id, json_parser=False, verbose=False):
        pass

    def get_user_info_by_pseudo(self, pseudo, json_parser=False, verbose=False):
        pass

    def get_publications(self, user_id, comments, json_parser=False, verbose=False):
        pass

    def get_comments_by_publication(self, publication_id, json_parser=False, verbose=False):
        pass

    def get_friends(self, user_id, verbose=False):
        pass

    def get_react(self, publication_id, verbose=False):
        pass

    def get_comment_by_key(self, key, json_parser=False, verbose=False):
        pass

    def get_comment_by_keys(self, keys, json_parser=False, verbose=False):
        pass


class YoutubeDriver(Driver):
    """YouTube implementation of :class:`Driver` (Data API v3)."""

    # ------------------------------------------------------------------ #
    # Internal helpers                                                   #
    # ------------------------------------------------------------------ #

    @staticmethod
    def _authenticate(file_pos):
        """Build an API client from the credential file at index *file_pos*.

        Returns ``(youtube, file_num)`` where *file_num* is the total number
        of available credential files.
        """
        return GetAuthenticated(file_pos).get_authenticated()

    @staticmethod
    def _pluck(response, path, default, cast=None):
        """Safely read ``response['items'][0][p0][p1]...`` along *path*.

        Replaces the original copy-pasted try/except ladder.  Returns
        *default* when any key along the path is missing; applies *cast*
        (e.g. ``str`` or ``list``) to the value when given.
        """
        try:
            value = response['items'][0]
            for part in path:
                value = value[part]
            return cast(value) if cast else value
        except Exception:
            return default

    def _channel_data(self, youtube, channel_id, verbose=False):
        """Fetch statistics/snippet/topic/branding fields for *channel_id*.

        Raises on API/quota failure (the caller handles credential rotation);
        individual missing fields simply default to ``""`` or ``[]``.
        """
        stats = youtube.channels().list(part="statistics", id=channel_id).execute()
        snippet = youtube.channels().list(part="snippet", id=channel_id).execute()
        topics = youtube.channels().list(part="topicDetails", id=channel_id).execute()
        branding = youtube.channels().list(part="brandingSettings", id=channel_id).execute()
        data = {
            'channel_id': self._pluck(stats, ('id',), "", str),
            'view_count': self._pluck(stats, ('statistics', 'viewCount'), "", str),
            'subscriber_count': self._pluck(stats, ('statistics', 'subscriberCount'), "", str),
            'video_count': self._pluck(stats, ('statistics', 'videoCount'), "", str),
            'channel_name': self._pluck(snippet, ('snippet', 'title'), ""),
            'description': self._pluck(snippet, ('snippet', 'description'), ""),
            'channel_creation_date': self._pluck(snippet, ('snippet', 'publishedAt'), ""),
            'profile_image': self._pluck(snippet, ('snippet', 'thumbnails', 'default', 'url'), ""),
            'country': self._pluck(snippet, ('snippet', 'country'), ""),
            'channel_topic_categories': self._pluck(topics, ('topicDetails', 'topicCategories'), [], list),
            'keywords': self._pluck(branding, ('brandingSettings', 'channel', 'keywords'), "", str),
            'featured_channels': self._pluck(branding, ('brandingSettings', 'channel', 'featuredChannelsUrls'), [], list),
            'unsubscribed_trailer': self._pluck(branding, ('brandingSettings', 'channel', 'unsubscribedTrailer'), "", str),
            'channel_banner_image': self._pluck(branding, ('brandingSettings', 'image', 'bannerImageUrl'), "", str),
        }
        if verbose:
            # Same "field: value" lines the original printed one by one.
            for field, value in data.items():
                print(f"{field}: {value}")
        return data

    def _dump_or_return(self, data, json_parser):
        """Either return *data* or dump it to ``<channel_name>_profile_data.json``."""
        if not json_parser:
            return data
        with open(data.get('channel_name', '') + '_profile_data.json', 'w') as f:
            json.dump(data, f)

    # ------------------------------------------------------------------ #
    # Operations YouTube does not support                                #
    # ------------------------------------------------------------------ #

    def get_friends(self, user_id):
        pass

    def get_event(self, user_id):
        pass

    def get_page(self, user_id):
        pass

    def get_group(self, user_id):
        pass

    def get_images(self, user_id):
        pass

    # ------------------------------------------------------------------ #
    # Scraping operations                                                #
    # ------------------------------------------------------------------ #

    def get_comment_by_key(self, key, json_parser=False, verbose=False):
        """Search for the first video matching *key* and scrape its comments.

        Returns the scraped comments (or ``None`` when *json_parser* is true,
        when the video's comments are unavailable, or when every credential
        file failed).
        """
        file_pos = 0
        youtube, file_num = self._authenticate(file_pos)
        while file_pos < file_num:
            try:
                response = youtube.search().list(
                    q=key, part="snippet", type="video"
                ).execute()
                video_id = response['items'][0]['id']['videoId']  # best match
                request = youtube.commentThreads().list(
                    part="snippet,replies", videoId=str(video_id)
                )
                try:
                    thread = request.execute()
                except Exception:
                    # Comments unavailable (private video / comments disabled).
                    # Bug fix: the original `continue`d here without advancing
                    # file_pos, retrying the same video forever.
                    return None
                get_comments = GetComments(response=thread, youtube=youtube,
                                           json_parser=json_parser, verbose=verbose)
                if json_parser:
                    get_comments.get_comments()  # writes JSON as a side effect
                    return None
                return get_comments.get_comments()
            except Exception:
                # Most likely quota exhaustion: rotate to the next credential file.
                file_pos += 1
                if file_pos < file_num:
                    youtube, file_num = self._authenticate(file_pos)
        return None

    def get_comment_by_keys(self, keys, json_parser=False, verbose=False):
        """Scrape the comments of the first video found for each keyword.

        Returns a list with one result per successfully scraped keyword
        (``None`` when *json_parser* is true).  Bug fix: the original
        restarted the entire keyword loop after a quota rotation, appending
        duplicate results.
        """
        file_pos = 0
        youtube, file_num = self._authenticate(file_pos)
        collected = []
        for key in keys:
            while file_pos < file_num:
                try:
                    response = youtube.search().list(
                        q=key, part="snippet", type="video"
                    ).execute()
                    video_id = response['items'][0]['id']['videoId']
                    request = youtube.commentThreads().list(
                        part="snippet,replies", videoId=str(video_id)
                    )
                    try:
                        thread = request.execute()
                    except Exception:
                        break  # comments unavailable: skip this keyword
                    get_comments = GetComments(response=thread, youtube=youtube,
                                               json_parser=json_parser, verbose=verbose)
                    if json_parser:
                        get_comments.get_comments()
                    else:
                        collected.append(get_comments.get_comments())
                    break  # done with this keyword, move to the next
                except Exception:
                    # Quota error: rotate credentials and retry the same keyword.
                    file_pos += 1
                    if file_pos < file_num:
                        youtube, file_num = self._authenticate(file_pos)
        if not json_parser:
            return collected

    def get_react(self, publication_id, verbose=False):
        """Return the like count of video *publication_id* (a numeric string)."""
        file_pos = 0
        youtube, file_num = self._authenticate(file_pos)
        while file_pos < file_num:
            try:
                response = youtube.videos().list(
                    part="statistics", id=publication_id
                ).execute()
                return response['items'][0]['statistics']['likeCount']
            except Exception:
                file_pos += 1
                if file_pos < file_num:
                    youtube, file_num = self._authenticate(file_pos)
        return None

    def get_user_info(self, user_id, json_parser=False, verbose=False):
        """Collect channel profile/statistics for the channel id *user_id*.

        Returns the profile dict, or dumps it to
        ``<channel_name>_profile_data.json`` when *json_parser* is true.
        Bug fix: the original raised ``NameError`` after the loop when every
        credential file failed; now an empty dict is used instead.
        """
        file_pos = 0
        youtube, file_num = self._authenticate(file_pos)
        data = {}
        while file_pos < file_num:
            try:
                data = self._channel_data(youtube, user_id, verbose=verbose)
                break
            except Exception:
                file_pos += 1
                if file_pos < file_num:
                    youtube, file_num = self._authenticate(file_pos)
        return self._dump_or_return(data, json_parser)

    def get_user_info_by_pseudo(self, pseudo, json_parser=False, verbose=False):
        """Like :meth:`get_user_info`, but first resolves the channel id by
        searching for the username *pseudo*."""
        file_pos = 0
        youtube, file_num = self._authenticate(file_pos)
        data = {}
        while file_pos < file_num:
            try:
                response = youtube.search().list(
                    q=pseudo, part="snippet", type="channel"
                ).execute()
                channel_id = response['items'][0]['id']['channelId']
                data = self._channel_data(youtube, channel_id, verbose=verbose)
                break
            except Exception:
                file_pos += 1
                if file_pos < file_num:
                    youtube, file_num = self._authenticate(file_pos)
        return self._dump_or_return(data, json_parser)

    def get_publications(self, user_id, comments, json_parser=False, verbose=False):
        """Scrape comment threads related to the channel matching *user_id*.

        *comments* is forwarded to ``GetComments`` as ``take_comments``.
        """
        file_pos = 0
        youtube, file_num = self._authenticate(file_pos)
        while file_pos < file_num:
            try:
                response = youtube.search().list(
                    q=user_id, part="snippet", type="channel"
                ).execute()
                channel_id = response['items'][0]['id']['channelId']  # channel id
                threads = youtube.commentThreads().list(
                    part="snippet,replies",
                    allThreadsRelatedToChannelId=channel_id,
                    maxResults=100
                ).execute()
                get_comments = GetComments(response=threads, youtube=youtube,
                                           json_parser=json_parser,
                                           take_comments=comments, verbose=verbose)
                if json_parser:
                    get_comments.get_comments()
                    return None
                return get_comments.get_comments()
            except Exception:
                file_pos += 1
                if file_pos < file_num:
                    youtube, file_num = self._authenticate(file_pos)
        return None

    def get_comments_by_publication(self, publication_id, json_parser=False, verbose=False):
        """Scrape the comments of the video whose id is *publication_id*."""
        file_pos = 0
        youtube, file_num = self._authenticate(file_pos)
        while file_pos < file_num:
            try:
                request = youtube.commentThreads().list(
                    part="snippet,replies", videoId=str(publication_id)
                )
                try:
                    thread = request.execute()
                except Exception:
                    # Comments unavailable -- bug fix: the original `continue`d
                    # here without advancing file_pos, looping forever.
                    return None
                get_comments = GetComments(response=thread, youtube=youtube,
                                           json_parser=json_parser, verbose=verbose)
                if json_parser:
                    get_comments.get_comments()
                    return None
                return get_comments.get_comments()
            except Exception:
                file_pos += 1
                if file_pos < file_num:
                    youtube, file_num = self._authenticate(file_pos)
        return None
38.349398
185
0.504152
1,866
22,281
5.810289
0.092712
0.022597
0.043903
0.05036
0.905645
0.899004
0.85713
0.846707
0.826416
0.818115
0
0.005993
0.408375
22,281
580
186
38.415517
0.816492
0.084915
0
0.859729
0
0
0.098984
0.013224
0
0
0
0
0
1
0.054299
false
0.054299
0.020362
0
0.095023
0.065611
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
8
80b728a8deccf2a23c02197e35a0f7ea22282439
194
py
Python
filebrowser/tests/__init__.py
klueska/django-filebrowser
71cd35e876529d526a5b31380609b93209c7a0e4
[ "BSD-3-Clause" ]
6
2016-03-10T19:38:17.000Z
2021-02-23T09:34:59.000Z
filebrowser/tests/__init__.py
klueska/django-filebrowser
71cd35e876529d526a5b31380609b93209c7a0e4
[ "BSD-3-Clause" ]
602
2015-01-05T16:30:08.000Z
2021-02-02T21:44:38.000Z
filebrowser/tests/__init__.py
klueska/django-filebrowser
71cd35e876529d526a5b31380609b93209c7a0e4
[ "BSD-3-Clause" ]
18
2015-02-12T15:50:17.000Z
2021-04-27T16:40:36.000Z
from filebrowser.tests.settings import SettingsTests from filebrowser.tests.base import FileObjectPathTests, FileObjectVersionTests, FileObjectUnicodeTests from filebrowser.tests.sites import *
48.5
102
0.881443
19
194
9
0.578947
0.263158
0.350877
0
0
0
0
0
0
0
0
0
0.072165
194
3
103
64.666667
0.95
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
80d89eb7e6e0523fd3789794b7ff46300cbe177c
193
py
Python
django_flickr_gallery/admin/__init__.py
ezyatev/django-flickr-gallery
a41622f944671de702639370fffeed04fdaa8d8a
[ "BSD-3-Clause" ]
13
2015-04-17T12:33:53.000Z
2017-10-08T21:47:40.000Z
django_flickr_gallery/admin/__init__.py
ezyatev/django-flickr-gallery
a41622f944671de702639370fffeed04fdaa8d8a
[ "BSD-3-Clause" ]
10
2015-10-30T11:43:55.000Z
2018-03-30T01:35:25.000Z
django_flickr_gallery/admin/__init__.py
ezyatev/django-flickr-gallery
a41622f944671de702639370fffeed04fdaa8d8a
[ "BSD-3-Clause" ]
7
2015-11-22T12:26:06.000Z
2018-09-16T12:51:13.000Z
from django.contrib import admin
from django_flickr_gallery.admin.photoset import PhotosetAdmin
from django_flickr_gallery.models import Photoset

# Expose the Photoset model in the Django admin site using its
# custom PhotosetAdmin options class.
admin.site.register(Photoset, PhotosetAdmin)
27.571429
62
0.870466
25
193
6.56
0.48
0.182927
0.195122
0.280488
0
0
0
0
0
0
0
0
0.082902
193
6
63
32.166667
0.926554
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.75
0
0.75
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
80f61b1e1dbc67c6e420353a2160d78cad17fcab
13,562
py
Python
präsentation-verfahren-zur-numerischen-integration/manim.py
lwskdesign/mathe-lk
280b47b078fd34395b32a08e599ef5c302dc60e5
[ "MIT" ]
null
null
null
präsentation-verfahren-zur-numerischen-integration/manim.py
lwskdesign/mathe-lk
280b47b078fd34395b32a08e599ef5c302dc60e5
[ "MIT" ]
null
null
null
präsentation-verfahren-zur-numerischen-integration/manim.py
lwskdesign/mathe-lk
280b47b078fd34395b32a08e599ef5c302dc60e5
[ "MIT" ]
null
null
null
"""Manim scenes animating four numerical-integration rules.

Each scene plots the same cubic ``f`` on a ``GraphScene``, marks the
integration bounds ``a`` and ``b`` (and, where used, the midpoint ``c``),
and shades the area that the respective rule uses to approximate the
integral.  On-screen text is intentionally in German.
"""
from manim import *
import numpy as np
import scipy.stats as stats


class Mittelpunktsregel(GraphScene):
    """Midpoint rule: one rectangle over [a, b] sampled at the center."""
    CONFIG={
        "x_min": -1,
        "x_max": 10,
        "x_axis_width": 9,
        "y_min": -1,
        "y_max": 10,
        "y_axis_height": 6
    }

    def construct(self):
        title=TextMobject("Mittelpunktsregel").scale(1.5)
        subtitle=TextMobject("Verfahren zur numerischen Integration I")
        subtitle.next_to(title, DOWN)
        subtitle_alt=TextMobject("Rechtecke unter dem Graphen von $f$")
        subtitle_alt.next_to(title, DOWN)
        self.play(Write(title), Write(subtitle))
        self.wait(2.5)
        self.play(Transform(subtitle,subtitle_alt))
        self.wait(2.5)
        self.play(FadeOut(title), FadeOut(subtitle))
        self.setup_axes(animate=True)  # show the coordinate system

        def func(x):
            return 0.1 * (x + 3 - 5) * (x - 3 - 5) * (x - 5) + 5  # function to display

        graph = self.get_graph(func, x_min=-1, x_max=10)  # build the graph
        graph.set_color(RED)
        graph_label = self.get_graph_label(graph, label = "f")
        self.play(ShowCreation(graph), run_time=3)  # draw the graph
        self.play(Write(graph_label))
        self.wait()
        # Lower bound a with dashed marker line.
        a_grenze = 3
        a_grenze_line = self.get_vertical_line_to_graph(a_grenze, graph, line_class=DashedLine, color=BLUE)
        a_grenze_label = TextMobject("$a$", ).scale(0.75)
        a_grenze_label.next_to(a_grenze_line,DOWN)
        self.play(ShowCreation(a_grenze_line), Write(a_grenze_label))
        self.wait(0.5)
        # Upper bound b.
        b_grenze = 6
        b_grenze_line = self.get_vertical_line_to_graph(b_grenze, graph, line_class=DashedLine, color=BLUE)
        b_grenze_label = TextMobject("$b$").scale(0.75)
        b_grenze_label.next_to(b_grenze_line,DOWN)
        self.play(ShowCreation(b_grenze_line), Write(b_grenze_label))
        self.wait(0.5)
        # Midpoint c = (a+b)/2 with its formula labels.
        c_mitte = (a_grenze+b_grenze)/2
        c_mitte_line = self.get_vertical_line_to_graph(c_mitte, graph, line_class=DashedLine, color=BLUE)
        c_mitte_label = MathTex(r"c=\frac{a+b}{2}").scale(0.5)
        c_mitte_label.next_to(c_mitte_line,DOWN)
        c_mitte_label2 = MathTex(r"f(c)= f\left( \frac{a+b}{2} \right)").scale(0.5)
        c_mitte_label2.next_to(c_mitte_line,UP)
        self.play(ShowCreation(c_mitte_line), Write(c_mitte_label),
                  Write(c_mitte_label2))
        self.wait(2)
        # Single center-sampled rectangle (dx spans the whole interval).
        midInt = self.get_riemann_rectangles(
            graph,
            x_min=a_grenze,
            x_max=b_grenze,
            dx=b_grenze-a_grenze,
            input_sample_type="center",
            fill_opacity=0.5,
            stroke_width=0,
        )
        self.play(FadeIn(midInt))
        self.wait(5)


class Sehentrapezregel(GraphScene):
    """Chord trapezoid rule: area under the secant through (a|f(a)), (b|f(b))."""
    CONFIG={
        "x_min": -1,
        "x_max": 10,
        "x_axis_width": 9,
        "y_min": -1,
        "y_max": 10,
        "y_axis_height": 6,
        "default_riemann_start_color": GREEN,
        "default_riemann_end_color": GREEN,
    }

    def construct(self):
        title=TextMobject("Sehnentrapezregel").scale(1.5)
        subtitle=TextMobject("Verfahren zur numerischen Integration II")
        subtitle.next_to(title, DOWN)
        subtitle_alt=TextMobject("Trapeze unter dem Graphen von $f$")
        subtitle_alt.next_to(title, DOWN)
        self.play(Write(title), Write(subtitle))
        self.wait(2.5)
        self.play(Transform(subtitle,subtitle_alt))
        self.wait(2.5)
        self.play(FadeOut(title), FadeOut(subtitle))
        self.setup_axes(animate=True)  # show the coordinate system

        def func(x):
            return 0.1 * (x + 3 - 5) * (x - 3 - 5) * (x - 5) + 5  # function to display

        graph = self.get_graph(func, x_min=-1, x_max=10)  # build the graph
        graph.set_color(RED)
        graph_label = self.get_graph_label(graph, label = "f")
        self.play(ShowCreation(graph), run_time=3)  # draw the graph
        self.play(Write(graph_label))
        self.wait()
        a_grenze = 3
        a_grenze_line = self.get_vertical_line_to_graph(a_grenze, graph, line_class=DashedLine, color=BLUE)
        a_grenze_label = TextMobject("$a$", ).scale(0.75)
        a_grenze_label.next_to(a_grenze_line,DOWN)
        self.play(ShowCreation(a_grenze_line), Write(a_grenze_label))
        self.wait(0.5)
        b_grenze = 6
        b_grenze_line = self.get_vertical_line_to_graph(b_grenze, graph, line_class=DashedLine, color=BLUE)
        b_grenze_label = TextMobject("$b$").scale(0.75)
        b_grenze_label.next_to(b_grenze_line,DOWN)
        self.play(ShowCreation(b_grenze_line), Write(b_grenze_label))
        self.wait(2)
        a_punkt = ValueTracker(a_grenze)  # draw the point at (a|f(a))
        a_punkt_wert = a_punkt.get_value()
        a_punkt_geometrie = Dot().move_to(self.coords_to_point(a_punkt_wert,
                                                               func(a_punkt_wert)))
        b_punkt = ValueTracker(b_grenze)  # draw the point at (b|f(b))
        b_punkt_wert = b_punkt.get_value()
        b_punkt_geometrie = Dot().move_to(self.coords_to_point(b_punkt_wert,
                                                               func(b_punkt_wert)))
        self.play(FadeIn(a_punkt_geometrie), FadeIn(b_punkt_geometrie))
        self.wait(2)
        # Degree-1 fit through the two endpoints = the secant (chord) line.
        x = [a_grenze, b_grenze]
        y = [func(x[0]), func(x[1])]
        lineare_regression = np.poly1d(np.polyfit(x, y, 1))
        trapLine = self.get_graph(lineare_regression, x_min=1, x_max=8, color=GREEN)
        self.play(FadeIn(trapLine))
        self.wait(2)
        # Shade under the secant with very thin rectangles (dx=0.001)
        # so the filled region looks like the trapezoid.
        trapInt = self.get_riemann_rectangles(
            trapLine,
            x_min=3,
            x_max=6,
            dx=0.001,
            input_sample_type="center",
            fill_opacity=0.5,
            stroke_width=0,
        )
        self.play(FadeIn(trapInt))
        self.wait(5)


class Tangententrapezregel(GraphScene):
    """Tangent trapezoid rule: area under the tangent at the midpoint c."""
    CONFIG={
        "x_min": -1,
        "x_max": 10,
        "x_axis_width": 9,
        "y_min": -1,
        "y_max": 10,
        "y_axis_height": 6,
        "camera_config":{"background_color":"#475147"},
        "default_riemann_start_color": GREEN,
        "default_riemann_end_color": GREEN,
    }

    def construct(self):
        title=TextMobject("Tangententrapezregel").scale(1.5)
        subtitle=TextMobject("Verfahren zur numerischen Integration III")
        subtitle.next_to(title, DOWN)
        subtitle_alt=TextMobject("Trapetze unter einer Tangente durch $(c|f(c))$")
        subtitle_alt.next_to(title, DOWN)
        self.play(Write(title), Write(subtitle))
        self.wait(2.5)
        self.play(Transform(subtitle,subtitle_alt))
        self.wait(2.5)
        self.play(FadeOut(title), FadeOut(subtitle))
        self.setup_axes(animate=True)  # show the coordinate system

        def func(x):
            return 0.1 * (x + 3 - 5) * (x - 3 - 5) * (x - 5) + 5  # function to display

        graph = self.get_graph(func, x_min=-1, x_max=10)  # build the graph
        graph.set_color(RED)
        graph_label = self.get_graph_label(graph, label = "f")
        self.play(ShowCreation(graph), run_time=3)  # draw the graph
        self.play(Write(graph_label))
        self.wait()
        a_grenze = 3
        a_grenze_line = self.get_vertical_line_to_graph(a_grenze, graph, line_class=DashedLine, color=BLUE)
        a_grenze_label = TextMobject("$a$", ).scale(0.75)
        a_grenze_label.next_to(a_grenze_line,DOWN)
        self.play(ShowCreation(a_grenze_line), Write(a_grenze_label))
        self.wait(0.5)
        b_grenze = 6
        b_grenze_line = self.get_vertical_line_to_graph(b_grenze, graph, line_class=DashedLine, color=BLUE)
        b_grenze_label = TextMobject("$b$").scale(0.75)
        b_grenze_label.next_to(b_grenze_line,DOWN)
        self.play(ShowCreation(b_grenze_line), Write(b_grenze_label))
        self.wait(0.5)
        c_mitte = (a_grenze+b_grenze)/2
        c_mitte_line = self.get_vertical_line_to_graph(c_mitte, graph, line_class=DashedLine, color=BLUE)
        c_mitte_label = MathTex(r"c=\frac{a+b}{2}").scale(0.5)
        c_mitte_label.next_to(c_mitte_line,DOWN)
        c_mitte_label2 = MathTex(r"f(c)= f\left( \frac{a+b}{2} \right)").scale(0.5)
        c_mitte_label2.next_to(c_mitte_line,UP).shift(0.2 * UP)
        self.play(ShowCreation(c_mitte_line), Write(c_mitte_label),
                  Write(c_mitte_label2))
        self.wait(0.5)
        c_punkt = ValueTracker(c_mitte)  # draw the point at (c|f(c))
        c_punkt_wert = c_punkt.get_value()
        c_punkt_geometrie = Dot().move_to(self.coords_to_point(c_punkt_wert,
                                                               func(c_punkt_wert)))
        self.play(FadeIn(c_punkt_geometrie))
        self.wait(2)
        # Hard-coded tangent line at c for this specific f.
        trapLine = self.get_graph(lambda x: -0.825*x+9.15, x_min=1, x_max=8, color=GREEN)
        self.play(FadeIn(trapLine))
        self.wait(2)
        # Shade under the tangent (thin rectangles approximate the trapezoid).
        trapInt = self.get_riemann_rectangles(
            trapLine,
            x_min=3,
            x_max=6,
            dx=0.001,
            input_sample_type="center",
            fill_opacity=0.5,
            stroke_width=0,
        )
        self.play(FadeIn(trapInt))
        self.wait(5)


class KeplerscheFassregel(GraphScene):
    """Simpson's rule / Kepler's barrel rule: parabola through a, c and b."""
    CONFIG={
        "x_min": -1,
        "x_max": 10,
        "x_axis_width": 9,
        "y_min": -1,
        "y_max": 10,
        "y_axis_height": 6,
        "camera_config":{"background_color":"#475147"},
        "default_riemann_start_color": GREEN,
        "default_riemann_end_color": GREEN,
    }

    def construct(self):
        title=TextMobject("Simpsonregel/Kepler'sche Fassregel").scale(1.5)
        subtitle=TextMobject("Verfahren zur numerischen Integration IV")
        subtitle.next_to(title, DOWN)
        subtitle_alt=TextMobject("Parabel durch $(a|f(a))$, $(c|f(c))$ und $(b|f(b))$")
        subtitle_alt.next_to(title, DOWN)
        self.play(Write(title), Write(subtitle))
        self.wait(2.5)
        self.play(Transform(subtitle,subtitle_alt))
        self.wait(2.5)
        self.play(FadeOut(title), FadeOut(subtitle))
        self.setup_axes(animate=True)  # show the coordinate system

        def func(x):
            return 0.1 * (x + 3 - 5) * (x - 3 - 5) * (x - 5) + 5  # function to display

        graph = self.get_graph(func, x_min=-1, x_max=10)  # build the graph
        graph.set_color(RED)
        graph_label = self.get_graph_label(graph, label = "f")
        self.play(ShowCreation(graph), run_time=3)  # draw the graph
        self.play(Write(graph_label))
        self.wait()
        a_grenze = 3
        a_grenze_line = self.get_vertical_line_to_graph(a_grenze, graph, line_class=DashedLine, color=BLUE)
        a_grenze_label = TextMobject("$a$", ).scale(0.75)
        a_grenze_label.next_to(a_grenze_line,DOWN)
        self.play(ShowCreation(a_grenze_line), Write(a_grenze_label))
        self.wait(0.5)
        b_grenze = 6
        b_grenze_line = self.get_vertical_line_to_graph(b_grenze, graph, line_class=DashedLine, color=BLUE)
        b_grenze_label = TextMobject("$b$").scale(0.75)
        b_grenze_label.next_to(b_grenze_line,DOWN)
        self.play(ShowCreation(b_grenze_line), Write(b_grenze_label))
        self.wait(0.5)
        c_mitte = (a_grenze+b_grenze)/2
        c_mitte_line = self.get_vertical_line_to_graph(c_mitte, graph, line_class=DashedLine, color=BLUE)
        c_mitte_label = MathTex(r"c=\frac{a+b}{2}").scale(0.5)
        c_mitte_label.next_to(c_mitte_line,DOWN)
        c_mitte_label2 = MathTex(r"f(c)= f\left( \frac{a+b}{2} \right)").scale(0.5)
        c_mitte_label2.next_to(c_mitte_line,UP)
        self.play(ShowCreation(c_mitte_line), Write(c_mitte_label),
                  Write(c_mitte_label2))
        self.wait(0.5)
        a_punkt = ValueTracker(a_grenze)  # draw the point at (a|f(a))
        a_punkt_wert = a_punkt.get_value()
        a_punkt_geometrie = Dot().move_to(self.coords_to_point(a_punkt_wert,
                                                               func(a_punkt_wert)))
        b_punkt = ValueTracker(b_grenze)  # draw the point at (b|f(b))
        b_punkt_wert = b_punkt.get_value()
        b_punkt_geometrie = Dot().move_to(self.coords_to_point(b_punkt_wert,
                                                               func(b_punkt_wert)))
        c_punkt = ValueTracker(c_mitte)  # draw the point at (c|f(c))
        c_punkt_wert = c_punkt.get_value()
        c_punkt_geometrie = Dot().move_to(self.coords_to_point(c_punkt_wert,
                                                               func(c_punkt_wert)))
        self.play(FadeIn(a_punkt_geometrie), FadeIn(b_punkt_geometrie),
                  FadeIn(c_punkt_geometrie))
        self.wait(2)
        # Degree-2 fit through the three points = the interpolating parabola.
        x = [a_grenze, c_mitte, b_grenze]
        y = [func(x[0]), func(x[1]), func(x[2])]
        quadratische_regression = np.poly1d(np.polyfit(x, y, 2))
        parabel = self.get_graph(quadratische_regression, x_min=1, x_max=8, color=GREEN)
        self.play(FadeIn(parabel))
        self.wait(2)
        # Shade under the parabola between a and b.
        trapInt = self.get_riemann_rectangles(
            parabel,
            x_min=3,
            x_max=6,
            dx=0.001,
            input_sample_type="center",
            fill_opacity=0.5,
            stroke_width=0,
        )
        self.play(FadeIn(trapInt))
        self.wait(5)
39.539359
109
0.598658
1,851
13,562
4.103728
0.082118
0.043181
0.018957
0.008689
0.929042
0.924829
0.924829
0.913112
0.87783
0.840969
0
0.026839
0.280195
13,562
342
110
39.654971
0.75128
0.035688
0
0.826241
0
0.003546
0.078355
0.013787
0
0
0
0
0
1
0.028369
false
0
0.010638
0.014184
0.08156
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
0395afa492c73a2f30afb50e9fe824231315a3f7
8,797
py
Python
Common/cdr_origin_destination.py
SpatialDataCommons/CDR-analysis-tools-hadoop
8e02b72cbcfe031481659d75eb224b52437fae8e
[ "MIT" ]
6
2020-05-30T15:32:29.000Z
2021-08-17T09:23:45.000Z
Common/cdr_origin_destination.py
SpatialDataCommons/CDR-analysis-tools-hadoop
8e02b72cbcfe031481659d75eb224b52437fae8e
[ "MIT" ]
1
2020-12-15T14:18:54.000Z
2020-12-22T05:54:24.000Z
Common/cdr_origin_destination.py
SpatialDataCommons/CDR-analysis-tools-hadoop
8e02b72cbcfe031481659d75eb224b52437fae8e
[ "MIT" ]
3
2020-05-26T14:08:21.000Z
2022-01-21T09:59:54.000Z
from Common.hive_connection import HiveConnection
import time
from Common.helper import format_two_point_time, sql_to_string


class OriginDestination:
    """Builds the Hive tables of the origin-destination (OD) pipeline.

    The pipeline derives OD flows from CDR data in four dependent steps,
    each materialized as a Hive table named
    ``<provider_prefix>_la_cdr_all_with_ant_zone<suffix>``.
    Every step drops, re-creates and re-populates its table from SQL
    templates under ``origin_destination/``, logging elapsed times.
    """

    def __init__(self, config):
        # config supplies provider_prefix, od_admin_unit and od_date.
        self.config = config
        self.hc = HiveConnection()

    def calculate_od(self):
        """Run the whole OD pipeline in dependency order."""
        self.cdr_by_uid()
        self.create_od()
        self.create_od_detail()
        self.create_od_sum()

    def _rebuild_table(self, suffix, create_sql_file, insert_sql_file, insert_format_args):
        """Drop, re-create and re-populate one pipeline table, logging timings.

        suffix: table-name suffix appended to
            ``<provider_prefix>_la_cdr_all_with_ant_zone``.
        create_sql_file / insert_sql_file: SQL template paths passed to
            ``sql_to_string``.
        insert_format_args: ``str.format`` kwargs for the insert template
            (must include ``provider_prefix``).
        """
        provider_prefix = self.config.provider_prefix
        table = '{provider_prefix}_la_cdr_all_with_ant_zone{suffix}'.format(
            provider_prefix=provider_prefix, suffix=suffix)
        cursor = self.hc.cursor

        timer = time.time()
        print('Checking and dropping {table} table if existing.'.format(table=table))
        cursor.execute('DROP TABLE IF EXISTS {table}'.format(table=table))
        print('Checked and dropped {table} table if existing. Elapsed time: {time} seconds'
              .format(table=table, time=format_two_point_time(timer, time.time())))

        timer = time.time()
        print('Creating {table} table'.format(table=table))
        query = sql_to_string(create_sql_file).format(provider_prefix=provider_prefix)
        cursor.execute(query)
        print('Created {table} table. Elapsed time: {time} seconds'
              .format(table=table, time=format_two_point_time(timer, time.time())))

        timer = time.time()
        print('Inserting into {table} table'.format(table=table))
        query = sql_to_string(insert_sql_file).format(**insert_format_args)
        cursor.execute(query)
        print('Inserted into {table} table. Elapsed time: {time} seconds'
              .format(table=table, time=format_two_point_time(timer, time.time())))

    def cdr_by_uid(self):
        """Step 1: CDR records with antenna zones, grouped by user id."""
        print('########## CREATE CDR BY UID TABLE ##########')
        self._rebuild_table(
            '_by_uid',
            'origin_destination/create_la_cdr_all_with_ant_zone_by_uid.sql',
            'origin_destination/insert_la_cdr_all_with_ant_zone_by_uid.sql',
            {'provider_prefix': self.config.provider_prefix,
             'target_admin': self.config.od_admin_unit,
             'od_date': self.config.od_date})
        print('########## FINISHED CREATING CDR BY UID TABLE ##########')

    def create_od(self):
        """Step 2: per-user origin-destination pairs."""
        # Fixed: the original drop/create log messages for this step referred
        # to the *_by_uid table instead of *_by_uid_od.
        print('########## CREATE OD TABLE ##########')
        self._rebuild_table(
            '_by_uid_od',
            'origin_destination/create_la_cdr_all_with_ant_zone_by_uid_od.sql',
            'origin_destination/insert_la_cdr_all_with_ant_zone_by_uid_od.sql',
            {'provider_prefix': self.config.provider_prefix,
             'target_unit': self.config.od_admin_unit})
        print('########## FINISHED CREATING OD TABLE ##########')

    def create_od_detail(self):
        """Step 3: detailed OD records."""
        print('########## CREATING OD DETAIL TABLE ##########')
        self._rebuild_table(
            '_by_uid_od_detail',
            'origin_destination/create_la_cdr_all_with_ant_zone_by_uid_od_detail.sql',
            'origin_destination/insert_la_cdr_all_with_ant_zone_by_uid_od_detail.sql',
            {'provider_prefix': self.config.provider_prefix})
        # Fixed: the original printed the CREATING banner again here
        # instead of a FINISHED banner.
        print('########## FINISHED CREATING OD DETAIL TABLE ##########')

    def create_od_sum(self):
        """Step 4: aggregated OD sums, then CSV export of the result."""
        print('########## CREATING OD SUM TABLE ##########')
        self._rebuild_table(
            '_by_uid_od_sum',
            'origin_destination/create_la_cdr_all_with_ant_zone_by_uid_od_sum.sql',
            'origin_destination/insert_la_cdr_all_with_ant_zone_by_uid_od_sum.sql',
            {'provider_prefix': self.config.provider_prefix})
        # Export the aggregated OD result to CSV on the Hive host.
        query = sql_to_string('origin_destination/od_to_csv.sql').format(
            provider_prefix=self.config.provider_prefix)
        self.hc.cursor.execute(query)
        print('OD Result is stored in /tmp/hive/od_result')
        print('########## FINISHED CREATING OD SUM TABLE ##########')
58.258278
120
0.706946
1,189
8,797
4.758621
0.061396
0.272181
0.130788
0.183104
0.917815
0.914457
0.914457
0.914457
0.914457
0.903853
0
0
0.193816
8,797
150
121
58.646667
0.7978
0
0
0.552239
0
0
0.393202
0.235194
0
0
0
0
0
1
0.044776
false
0
0.022388
0
0.074627
0.246269
0
0
0
null
1
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
20e20a87db9ee763da8a6bf469bc17fd4a5565b8
18,243
py
Python
tests/dhcpv4/kea_only/subnets_cmds/test_network_cmds.py
isc-projects/forge
dfec8b41003d6b5a229f69ee93616e0e5cc6d71b
[ "0BSD" ]
22
2015-02-27T11:51:05.000Z
2022-02-28T12:39:29.000Z
tests/dhcpv4/kea_only/subnets_cmds/test_network_cmds.py
isc-projects/forge
dfec8b41003d6b5a229f69ee93616e0e5cc6d71b
[ "0BSD" ]
16
2018-10-30T15:00:12.000Z
2019-01-11T17:55:13.000Z
tests/dhcpv4/kea_only/subnets_cmds/test_network_cmds.py
isc-projects/forge
dfec8b41003d6b5a229f69ee93616e0e5cc6d71b
[ "0BSD" ]
11
2015-02-27T11:51:36.000Z
2021-03-30T08:33:54.000Z
"""Kea Subnet manipulation commands""" # pylint: disable=invalid-name,line-too-long import pytest import srv_control import srv_msg import misc @pytest.mark.v4 @pytest.mark.kea_only @pytest.mark.controlchannel @pytest.mark.hook @pytest.mark.network_cmds def test_hook_v4_network_cmds_list(): misc.test_setup() srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.1') srv_control.config_srv_another_subnet_no_interface('192.168.51.0/24', '192.168.51.1-192.168.51.1') srv_control.config_srv_another_subnet_no_interface('192.168.52.0/24', '192.168.52.1-192.168.52.1') srv_control.config_srv_another_subnet_no_interface('192.168.53.0/24', '192.168.53.1-192.168.53.1') srv_control.config_srv('time-servers', 0, '199.199.199.10') srv_control.config_srv('time-servers', 2, '199.199.199.100') srv_control.config_srv('time-servers', 3, '199.199.199.200') # first shared subnet srv_control.shared_subnet('192.168.50.0/24', 0) srv_control.shared_subnet('192.168.51.0/24', 0) srv_control.set_conf_parameter_shared_subnet('name', '"name-abc"', 0) srv_control.set_conf_parameter_shared_subnet('interface', '"$(SERVER_IFACE)"', 0) # second shared-subnet srv_control.shared_subnet('192.168.52.0/24', 1) srv_control.shared_subnet('192.168.53.0/24', 1) srv_control.set_conf_parameter_shared_subnet('name', '"name-xyz"', 1) srv_control.set_conf_parameter_shared_subnet('relay', '{"ip-address":"$(GIADDR4)"}', 1) srv_control.add_hooks('libdhcp_subnet_cmds.so') srv_control.open_control_channel() srv_control.build_and_send_config_files() srv_control.start_srv('DHCP', 'started') srv_msg.send_ctrl_cmd_via_socket('{"command":"network4-list","arguments":{}}') @pytest.mark.v4 @pytest.mark.kea_only @pytest.mark.controlchannel @pytest.mark.hook @pytest.mark.network_cmds def test_hook_v4_network_cmds_get_by_name(): misc.test_setup() srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.1') srv_control.config_srv_another_subnet_no_interface('192.168.51.0/24', 
'192.168.51.1-192.168.51.1') srv_control.config_srv_another_subnet_no_interface('192.168.52.0/24', '192.168.52.1-192.168.52.1') srv_control.config_srv_another_subnet_no_interface('192.168.53.0/24', '192.168.53.1-192.168.53.1') srv_control.config_srv('time-servers', 0, '199.199.199.10') srv_control.config_srv('time-servers', 2, '199.199.199.100') srv_control.config_srv('time-servers', 3, '199.199.199.200') srv_control.add_hooks('libdhcp_subnet_cmds.so') # first shared subnet srv_control.shared_subnet('192.168.50.0/24', 0) srv_control.shared_subnet('192.168.51.0/24', 0) srv_control.set_conf_parameter_shared_subnet('name', '"name-abc"', 0) srv_control.set_conf_parameter_shared_subnet('interface', '"$(SERVER_IFACE)"', 0) # second shared-subnet srv_control.shared_subnet('192.168.52.0/24', 1) srv_control.shared_subnet('192.168.53.0/24', 1) srv_control.set_conf_parameter_shared_subnet('name', '"name-xyz"', 1) srv_control.set_conf_parameter_shared_subnet('relay', '{"ip-address":"$(GIADDR4)"}', 1) srv_control.open_control_channel() srv_control.build_and_send_config_files() srv_control.start_srv('DHCP', 'started') srv_msg.send_ctrl_cmd_via_socket('{"command":"network4-get","arguments":{"name":"name-xyz"}}') @pytest.mark.v4 @pytest.mark.kea_only @pytest.mark.controlchannel @pytest.mark.hook @pytest.mark.network_cmds def test_hook_v4_network_cmds_add(): misc.test_setup() srv_control.config_srv_subnet('$(EMPTY)', '$(EMPTY)') srv_control.config_srv_opt('domain-name-servers', '199.199.199.1,100.100.100.1') srv_control.open_control_channel() srv_control.add_hooks('libdhcp_subnet_cmds.so') srv_control.build_and_send_config_files() srv_control.start_srv('DHCP', 'started') misc.test_procedure() srv_msg.client_requests_option(1) srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04') srv_msg.client_send_msg('DISCOVER') misc.pass_criteria() srv_msg.send_dont_wait_for_message() srv_msg.send_ctrl_cmd_via_socket('{"command":"network4-list","arguments":{}}', exp_result=3) 
srv_msg.send_ctrl_cmd_via_socket('{"command":"network4-add","arguments":{"shared-networks": [{"name": "name-xyz","rebind-timer": 100,"renew-timer": 100,"valid-lifetime": 400,"subnet4": [{"interface": "$(SERVER_IFACE)", "pools": [{"pool": "192.168.50.1/32"}],"rebind-timer": 2000,"renew-timer": 1000,"subnet": "192.168.50.0/24","valid-lifetime": 4000}]}]}}') srv_msg.send_ctrl_cmd_via_socket('{"command":"network4-get","arguments":{"name": "name-xyz"}}') srv_msg.forge_sleep(3, 'seconds') misc.test_procedure() srv_msg.client_requests_option(1) srv_msg.client_requests_option(6) srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04') srv_msg.client_send_msg('DISCOVER') misc.pass_criteria() srv_msg.send_wait_for_message('MUST', 'OFFER') @pytest.mark.v4 @pytest.mark.kea_only @pytest.mark.controlchannel @pytest.mark.hook @pytest.mark.network_cmds def test_hook_v4_network_cmds_add_conflict(): misc.test_setup() srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.1') srv_control.config_srv_another_subnet_no_interface('192.168.51.0/24', '192.168.51.1-192.168.51.1') srv_control.config_srv_another_subnet_no_interface('192.168.52.0/24', '192.168.52.1-192.168.52.1') srv_control.config_srv_another_subnet_no_interface('192.168.53.0/24', '192.168.53.1-192.168.53.1') srv_control.config_srv('time-servers', 0, '199.199.199.10') srv_control.config_srv('time-servers', 2, '199.199.199.100') srv_control.config_srv('time-servers', 3, '199.199.199.200') srv_control.add_hooks('libdhcp_subnet_cmds.so') # first shared subnet srv_control.shared_subnet('192.168.50.0/24', 0) srv_control.shared_subnet('192.168.51.0/24', 0) srv_control.set_conf_parameter_shared_subnet('name', '"name-abc"', 0) srv_control.set_conf_parameter_shared_subnet('interface', '"$(SERVER_IFACE)"', 0) # second shared-subnet srv_control.shared_subnet('192.168.52.0/24', 1) srv_control.shared_subnet('192.168.53.0/24', 1) srv_control.set_conf_parameter_shared_subnet('name', '"name-xyz"', 1) 
srv_control.set_conf_parameter_shared_subnet('relay', '{"ip-address":"$(GIADDR4)"}', 1) srv_control.open_control_channel() srv_control.build_and_send_config_files() srv_control.start_srv('DHCP', 'started') srv_msg.send_ctrl_cmd_via_socket('{"command":"network4-list","arguments":{}}') srv_msg.send_ctrl_cmd_via_socket('{"command":"network4-add","arguments":{"shared-networks": [{"match-client-id": true,"name": "name-xyz","option-data": [],"rebind-timer": 0,"relay": {"ip-address": "0.0.0.0"},"renew-timer": 0,"reservation-mode": "all","subnet4": [{"4o6-interface": "","4o6-interface-id": "","4o6-subnet": "","boot-file-name": "","id": 3,"match-client-id": true,"next-server": "0.0.0.0","option-data": [{"always-send": false,"code": 4,"csv-format": false,"data": "C7C7C764","name": "time-servers","space": "dhcp4"}],"pools": [{"option-data": [],"pool": "192.168.52.1/32"}],"rebind-timer": 2000,"relay": {"ip-address": "192.168.50.249"},"renew-timer": 1000,"reservation-mode": "all","server-hostname": "","subnet": "192.168.52.0/24","valid-lifetime": 4000},{"4o6-interface": "","4o6-interface-id": "","4o6-subnet": "","boot-file-name": "","id": 4,"match-client-id": true,"next-server": "0.0.0.0","option-data": [{"always-send": false,"code": 4,"csv-format": false,"data": "C7C7C7C8","name": "time-servers","space": "dhcp4"}],"pools": [{"option-data": [],"pool": "192.168.53.1/32"}],"rebind-timer": 2000,"relay": {"ip-address": "192.168.50.249"},"renew-timer": 1000,"reservation-mode": "all","server-hostname": "","subnet": "192.168.53.0/24","valid-lifetime": 4000}],"valid-lifetime": 0}]}}', exp_result=1) srv_msg.send_ctrl_cmd_via_socket('{"command":"network4-get","arguments":{"name": "name-xyz"}}') @pytest.mark.v4 @pytest.mark.kea_only @pytest.mark.controlchannel @pytest.mark.hook @pytest.mark.network_cmds def test_hook_v4_network_cmds_del(): misc.test_setup() srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.1') 
srv_control.config_srv_another_subnet_no_interface('192.168.51.0/24', '192.168.51.1-192.168.51.1') srv_control.config_srv('time-servers', 0, '199.199.199.10') srv_control.shared_subnet('192.168.50.0/24', 0) srv_control.shared_subnet('192.168.51.0/24', 0) srv_control.set_conf_parameter_shared_subnet('name', '"name-abc"', 0) srv_control.set_conf_parameter_shared_subnet('interface', '"$(SERVER_IFACE)"', 0) srv_control.add_hooks('libdhcp_subnet_cmds.so') srv_control.open_control_channel() srv_control.build_and_send_config_files() srv_control.start_srv('DHCP', 'started') misc.test_procedure() srv_msg.client_requests_option(1) srv_msg.client_requests_option(6) srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04') srv_msg.client_send_msg('DISCOVER') misc.pass_criteria() srv_msg.send_wait_for_message('MUST', 'OFFER') srv_msg.send_ctrl_cmd_via_socket('{"command":"network4-list","arguments":{}}') srv_msg.send_ctrl_cmd_via_socket('{"command":"network4-del","arguments":{"name":"name-abc","subnets-action": "delete"}}') misc.test_procedure() srv_msg.client_requests_option(1) srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04') srv_msg.client_send_msg('DISCOVER') misc.pass_criteria() srv_msg.send_dont_wait_for_message() @pytest.mark.v4 @pytest.mark.kea_only @pytest.mark.controlchannel @pytest.mark.hook @pytest.mark.network_cmds def test_hook_v4_network_cmds_del_keep_subnet(): misc.test_setup() srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.1') srv_control.config_srv_another_subnet_no_interface('192.168.51.0/24', '192.168.51.1-192.168.51.1') srv_control.config_srv('time-servers', 0, '199.199.199.10') srv_control.shared_subnet('192.168.50.0/24', 0) srv_control.shared_subnet('192.168.51.0/24', 0) srv_control.set_conf_parameter_shared_subnet('name', '"name-abc"', 0) srv_control.set_conf_parameter_shared_subnet('interface', '"$(SERVER_IFACE)"', 0) srv_control.add_hooks('libdhcp_subnet_cmds.so') srv_control.open_control_channel() 
srv_control.build_and_send_config_files() srv_control.start_srv('DHCP', 'started') misc.test_procedure() srv_msg.client_requests_option(1) srv_msg.client_requests_option(6) srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04') srv_msg.client_send_msg('DISCOVER') misc.pass_criteria() srv_msg.send_wait_for_message('MUST', 'OFFER') srv_msg.send_ctrl_cmd_via_socket('{"command":"network4-list","arguments":{}}') srv_msg.send_ctrl_cmd_via_socket('{"command":"network4-del","arguments":{"name":"name-abc","subnets-action": "keep"}}') misc.test_procedure() srv_msg.client_requests_option(1) srv_msg.client_requests_option(6) srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04') srv_msg.client_send_msg('DISCOVER') misc.pass_criteria() srv_msg.send_wait_for_message('MUST', 'OFFER') srv_msg.send_ctrl_cmd_via_socket('{"command":"network4-list","arguments":{}}', exp_result=3) @pytest.mark.v4 @pytest.mark.kea_only @pytest.mark.controlchannel @pytest.mark.hook @pytest.mark.network_cmds def test_hook_v4_network_cmds_del_non_existing(): misc.test_setup() srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.1') srv_control.config_srv_another_subnet_no_interface('192.168.51.0/24', '192.168.51.1-192.168.51.1') srv_control.config_srv_another_subnet_no_interface('192.168.52.0/24', '192.168.52.1-192.168.52.1') srv_control.config_srv_another_subnet_no_interface('192.168.53.0/24', '192.168.53.1-192.168.53.1') srv_control.config_srv('time-servers', 0, '199.199.199.10') srv_control.config_srv('time-servers', 2, '199.199.199.100') srv_control.config_srv('time-servers', 3, '199.199.199.200') srv_control.add_hooks('libdhcp_subnet_cmds.so') # first shared subnet srv_control.shared_subnet('192.168.50.0/24', 0) srv_control.shared_subnet('192.168.51.0/24', 0) srv_control.set_conf_parameter_shared_subnet('name', '"name-abc"', 0) srv_control.set_conf_parameter_shared_subnet('interface', '"$(SERVER_IFACE)"', 0) # second shared-subnet 
srv_control.shared_subnet('192.168.52.0/24', 1) srv_control.shared_subnet('192.168.53.0/24', 1) srv_control.set_conf_parameter_shared_subnet('name', '"name-xyz"', 1) srv_control.set_conf_parameter_shared_subnet('relay', '{"ip-address":"$(GIADDR4)"}', 1) srv_control.open_control_channel() srv_control.build_and_send_config_files() srv_control.start_srv('DHCP', 'started') srv_msg.send_ctrl_cmd_via_socket('{"command":"network4-del","arguments":{"name":"name-xxyz,"subnets-action": "delete""}}', exp_result=1) srv_msg.send_ctrl_cmd_via_socket('{"command":"network4-list","arguments":{}}') @pytest.mark.v4 @pytest.mark.kea_only @pytest.mark.controlchannel @pytest.mark.hook @pytest.mark.network_cmds def test_hook_v4_network_cmds_del_global_options(): misc.test_setup() srv_control.config_srv_opt('domain-name-servers', '199.199.199.1,100.100.100.1') srv_control.config_srv_subnet('192.168.50.0/24', '$(EMPTY)') srv_control.config_srv_another_subnet_no_interface('192.168.51.0/24', '192.168.51.1-192.168.51.1') # first shared subnet srv_control.shared_subnet('192.168.51.0/24', 0) srv_control.set_conf_parameter_shared_subnet('name', '"name-abc"', 0) srv_control.set_conf_parameter_shared_subnet('interface', '"$(SERVER_IFACE)"', 0) srv_control.open_control_channel() srv_control.add_hooks('libdhcp_subnet_cmds.so') srv_control.build_and_send_config_files() srv_control.start_srv('DHCP', 'started') misc.test_procedure() srv_msg.client_requests_option(1) srv_msg.client_requests_option(6) srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04') srv_msg.client_send_msg('DISCOVER') misc.pass_criteria() srv_msg.send_wait_for_message('MUST', 'OFFER') srv_msg.response_check_include_option(1) srv_msg.response_check_content('yiaddr', '192.168.51.1') srv_msg.response_check_option_content(1, 'value', '255.255.255.0') srv_msg.send_ctrl_cmd_via_socket('{"command": "network4-del","arguments":{"name":"name-abc","subnets-action": "delete"}}') misc.test_procedure() 
srv_msg.client_requests_option(1) srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04') srv_msg.client_send_msg('DISCOVER') misc.pass_criteria() srv_msg.send_dont_wait_for_message() # That needs subnet with empty pool to work misc.test_procedure() srv_msg.client_requests_option(6) srv_msg.client_sets_value('Client', 'ciaddr', '$(CIADDR)') srv_msg.client_send_msg('INFORM') misc.pass_criteria() srv_msg.send_wait_for_message('MUST', 'ACK') srv_msg.response_check_include_option(6) srv_msg.response_check_option_content(6, 'value', '199.199.199.1') srv_msg.response_check_option_content(6, 'value', '100.100.100.1') @pytest.mark.v4 @pytest.mark.kea_only @pytest.mark.controlchannel @pytest.mark.hook @pytest.mark.network_cmds def test_hook_v4_network_cmds_add_and_del(): misc.test_setup() srv_control.config_srv_subnet('$(EMPTY)', '$(EMPTY)') srv_control.config_srv_opt('domain-name-servers', '199.199.199.1,100.100.100.1') srv_control.open_control_channel() srv_control.add_hooks('libdhcp_subnet_cmds.so') srv_control.build_and_send_config_files() srv_control.start_srv('DHCP', 'started') misc.test_procedure() srv_msg.client_requests_option(1) srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04') srv_msg.client_send_msg('DISCOVER') misc.pass_criteria() srv_msg.send_dont_wait_for_message() srv_msg.send_ctrl_cmd_via_socket('{"command":"network4-add","arguments":{"shared-networks": [{"name": "name-xyz","rebind-timer": 100,"renew-timer": 100,"valid-lifetime": 400,"subnet4": [{"interface": "$(SERVER_IFACE)", "pools": [{"pool": "192.168.50.1/32"}],"rebind-timer": 2000,"renew-timer": 1000,"subnet": "192.168.50.0/24","valid-lifetime": 4000}]}]}}') misc.test_procedure() srv_msg.client_requests_option(1) srv_msg.client_requests_option(6) srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04') srv_msg.client_send_msg('DISCOVER') misc.pass_criteria() srv_msg.send_wait_for_message('MUST', 'OFFER') srv_msg.response_check_include_option(1) 
srv_msg.response_check_content('yiaddr', '192.168.50.1') srv_msg.response_check_option_content(1, 'value', '255.255.255.0') srv_msg.send_ctrl_cmd_via_socket('{"command": "network4-del","arguments":{"name":"name-xyz","subnets-action": "delete"}}') misc.test_procedure() srv_msg.client_requests_option(1) srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04') srv_msg.client_send_msg('DISCOVER') misc.pass_criteria() srv_msg.send_dont_wait_for_message()
45.493766
1,309
0.686181
2,749
18,243
4.241179
0.064387
0.103782
0.043228
0.066815
0.962175
0.956857
0.953255
0.952054
0.944078
0.942362
0
0.10007
0.141643
18,243
400
1,310
45.6075
0.644486
0.016554
0
0.912903
0
0.025806
0.329038
0.148675
0
0
0
0
0
1
0.029032
true
0.03871
0.012903
0
0.041935
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
1
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
9
4561282cbdc703b01648f24bcbe28a438fcc31e0
136
py
Python
a/utils.py
zhangalbert/a
466b10ded2c85e7fceea60a95f081cb0b11b5222
[ "Apache-2.0" ]
null
null
null
a/utils.py
zhangalbert/a
466b10ded2c85e7fceea60a95f081cb0b11b5222
[ "Apache-2.0" ]
null
null
null
a/utils.py
zhangalbert/a
466b10ded2c85e7fceea60a95f081cb0b11b5222
[ "Apache-2.0" ]
null
null
null
import json

from webob import Response


def jsonfy(**kwargs):
    """Serialize the keyword arguments to JSON and wrap them in a webob Response."""
    payload = json.dumps(kwargs)
    return Response(payload, content_type='application/json')
19.428571
72
0.764706
18
136
5.722222
0.722222
0
0
0
0
0
0
0
0
0
0
0
0.125
136
6
73
22.666667
0.865546
0
0
0
0
0
0.117647
0
0
0
0
0
0
1
0.25
true
0
0.5
0.25
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
1
1
0
0
7
2fb51f5585ff5a4a9c60b4b09f0e7718737e0693
7,881
py
Python
demisto_sdk/commands/lint/tests/test_pylint_plugin/partner_level_checker_test.py
sturmianseq/demisto-sdk
67ce7ee70ccd557d661e03a60469301c5cbcb9c0
[ "MIT" ]
42
2019-11-07T13:02:00.000Z
2022-03-29T03:39:04.000Z
demisto_sdk/commands/lint/tests/test_pylint_plugin/partner_level_checker_test.py
sturmianseq/demisto-sdk
67ce7ee70ccd557d661e03a60469301c5cbcb9c0
[ "MIT" ]
1,437
2019-11-07T13:02:25.000Z
2022-03-31T12:48:11.000Z
demisto_sdk/commands/lint/tests/test_pylint_plugin/partner_level_checker_test.py
sturmianseq/demisto-sdk
67ce7ee70ccd557d661e03a60469301c5cbcb9c0
[ "MIT" ]
46
2019-12-09T21:44:30.000Z
2022-03-24T17:36:45.000Z
import astroid import pylint.testutils from demisto_sdk.commands.lint.resources.pylint_plugins import \ partner_level_checker # You can find documentation about adding new test checker here: # http://pylint.pycqa.org/en/latest/how_tos/custom_checkers.html#write-a-checker class TestTryExceptMainChecker(pylint.testutils.CheckerTestCase): """ Class which tests the functionality of sys exit checker . """ CHECKER_CLASS = partner_level_checker.PartnerChecker def test_try_except_exists(self): """ Given: - String of a code part which is being examined by pylint plugin. When: - in main function, try except statement exists. Then: - Ensure that it does not raise any errors, Check that there is no error message. """ node_b = astroid.extract_node(""" def test_function(): sys.exit(1) return True def main(): try: return True except: return False return_error('error') """) assert node_b is not None with self.assertNoMessages(): self.checker.visit_functiondef(node_b) def test_try_except_finally_exists(self): """ Given: - String of a code part which is being examined by pylint plugin. When: - in main function, try-except-finally statement exists. Then: - Ensure that it does not raise any errors, Check that there is no error message. """ node_b = astroid.extract_node(""" def test_function(): sys.exit(1) return True def main(): try: return True except: return False return_error('error') finally: pass """) assert node_b with self.assertNoMessages(): self.checker.visit_functiondef(node_b) def test_try_except_doesnt_exists(self): """ Given: - String of a code part which is being examined by pylint plugin. When: - in main function , there is no try except statement. 
Then: - Ensure that the correct message id is being added to the message errors of pylint """ node_b = astroid.extract_node(""" def test_function(): sys.exit(1) return True def main(): return True return_error('err') """) assert node_b is not None with self.assertAddsMessages( pylint.testutils.Message( msg_id='try-except-main-doesnt-exists', node=node_b, ), ): self.checker.visit_functiondef(node_b) class TestReturnErrorInMainChecker(pylint.testutils.CheckerTestCase): """ Class which tests the functionality of sys exit checker . """ CHECKER_CLASS = partner_level_checker.PartnerChecker def test_return_error_exists(self): """ Given: - String of a code part which is being examined by pylint plugin. When: - return_error exists in main function. Then: - Ensure that it does not raise any errors, Check that there is no error message. """ node_b = astroid.extract_node(""" def test_function(): sys.exit(1) return True def main(): try: return True except: return_error('not ok') """) assert node_b is not None with self.assertNoMessages(): self.checker.visit_functiondef(node_b) def test_return_error_dosnt_exists_in_main(self): """ Given: - String of a code part which is being examined by pylint plugin. When: - in main function , there is no return_error statement and in no other section in the code. Then: - Ensure that the correct message id is being added to the messages of pylint """ node_b = astroid.extract_node(""" def test_function(): sys.exit(1) return True def main(): try: return True except: return False """) assert node_b is not None with self.assertAddsMessages( pylint.testutils.Message( msg_id='return-error-does-not-exist-in-main', node=node_b, ), ): self.checker.visit_functiondef(node_b) def test_return_error_exists_not_in_main(self): """ Given: - String of a code part which is being examined by pylint plugin. When: - return_error statment exists but not in main function but in a different one. 
Then: - Ensure that the correct message id is being added to the messages of pylint """ node_b = astroid.extract_node(""" def test_function(): sys.exit(1) return_error('error') def main(): try: return True except: return False """) assert node_b is not None with self.assertAddsMessages( pylint.testutils.Message( msg_id='return-error-does-not-exist-in-main', node=node_b, ), ): self.checker.visit_functiondef(node_b) class TestReturnErrorCountChecker(pylint.testutils.CheckerTestCase): """ Class which tests the functionality of sys exit checker . """ CHECKER_CLASS = partner_level_checker.PartnerChecker def test_return_error_exists_once(self): """ Given: - String of a code part which is being examined by pylint plugin. When: - return_error exists only once. Then: - Ensure that it does not raise any errors, Check that there is no error message. """ node_b = astroid.extract_node(""" def test_function(): sys.exit(1) return True def main(): try: return True except: return_error('not ok') """) assert node_b is not None with self.assertNoMessages(): self.checker.visit_call(node_b) self.checker.leave_module(node_b) def test_return_error_exists_more_than_once(self): """ Given: - String of a code part which is being examined by pylint plugin. When: - return_error usage exists more than once in the code. Then: - Ensure that the correct message id is being added to the messages of pylint """ node_a, node_b = astroid.extract_node(""" return_error() def test_function(): return_error('again') #@ def main(): try: return True except: return_error('not ok') #@ """) assert node_b is not None with self.assertAddsMessages( pylint.testutils.Message( msg_id='too-many-return-error', node=node_b, ), ): self.checker.visit_call(node_b) self.checker.visit_call(node_a) self.checker.leave_module(node_b)
31.650602
104
0.530897
848
7,881
4.783019
0.148585
0.036982
0.035503
0.033531
0.835552
0.829882
0.819527
0.80646
0.792653
0.792653
0
0.001477
0.398553
7,881
248
105
31.778226
0.854219
0.279152
0
0.823129
0
0
0.414634
0.039805
0
0
0
0
0.108844
1
0.054422
false
0.006803
0.020408
0
0.238095
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
2fedfa3d306323a242a86b631c26d6d97559fcc7
200
py
Python
pypipe-core/pypipe/tools/freebayes.py
ctlab/pypipe
f20da66d915a0503ff367b648987c75620814b33
[ "MIT" ]
2
2015-02-19T14:25:31.000Z
2017-07-18T20:49:28.000Z
pypipe-core/pypipe/tools/freebayes.py
ctlab/pypipe
f20da66d915a0503ff367b648987c75620814b33
[ "MIT" ]
null
null
null
pypipe-core/pypipe/tools/freebayes.py
ctlab/pypipe
f20da66d915a0503ff367b648987c75620814b33
[ "MIT" ]
null
null
null
from pypipe.tools.toolsconfig import Freebayes from pypipe.utils import tool, check_if_program_exists check_if_program_exists('freebayes') @tool def freebayes(): return Freebayes.freebayes()
16.666667
54
0.805
26
200
5.961538
0.538462
0.129032
0.180645
0.258065
0
0
0
0
0
0
0
0
0.12
200
11
55
18.181818
0.880682
0
0
0
0
0
0.045226
0
0
0
0
0
0
1
0.166667
true
0
0.333333
0.166667
0.666667
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
1
1
0
0
7
2ff8bd6a857664c300093e0cd9e42f576163e4cb
1,607
py
Python
src/ikazuchi/tests/data/rst/parse_lineblock.py
t2y/ikazuchi
7023111e92fa47360c50cfefd1398c554475f2c6
[ "Apache-2.0" ]
null
null
null
src/ikazuchi/tests/data/rst/parse_lineblock.py
t2y/ikazuchi
7023111e92fa47360c50cfefd1398c554475f2c6
[ "Apache-2.0" ]
null
null
null
src/ikazuchi/tests/data/rst/parse_lineblock.py
t2y/ikazuchi
7023111e92fa47360c50cfefd1398c554475f2c6
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- DATA_SET = [ ( [u"| first line", u"| second line", u""], [("ln", [u"| first line", u"| second line"], u""), 1] ), ( [u"| first line", u"| second line"], [("ln", [u"| first line", u"| second line"], u""), 1] ), ( [u"| first line", u"| second line", u""], [("ln", [u"| first line", u"| second line"], u""), 1] ), ( [u"| first line", u"| second line", u"| third line", u""], [("ln", [u"| first line", u"| second line", u"| third line"], u""), 2] ), ( [u"| first line", u"| second line", u"| third line", u"| fourth line", u""], [("ln", [u"| first line", u"| second line", u"| third line", u"| fourth line"], u""), 3] ), ( [u"| first line", u"|", u"| second line", u""], [("ln", [u"| first line", u"|", u"| second line"], u""), 2] ), ( [u"| first line", u"|", u"| second line", u"|", u""], [("ln", [u"| first line", u"|", u"| second line", u"|"], u""), 3] ), ]
18.686047
32
0.257623
140
1,607
2.95
0.107143
0.399516
0.338983
0.372881
0.953995
0.953995
0.953995
0.949153
0.922518
0.837772
0
0.011065
0.550093
1,607
85
33
18.905882
0.560166
0.013068
0
0.843373
0
0
0.305556
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
1
1
1
1
1
1
1
1
1
0
0
1
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
11
64208dd5297376c33684041f2a39221e73440674
284
py
Python
nighres/segmentation/__init__.py
ahleighton/nighres
bc01463241a03a88569b3ba56e195127788b5639
[ "Apache-2.0" ]
41
2017-08-15T12:23:31.000Z
2022-02-28T15:12:22.000Z
nighres/segmentation/__init__.py
ahleighton/nighres
bc01463241a03a88569b3ba56e195127788b5639
[ "Apache-2.0" ]
130
2017-07-27T11:09:09.000Z
2022-03-31T10:05:07.000Z
nighres/segmentation/__init__.py
ahleighton/nighres
bc01463241a03a88569b3ba56e195127788b5639
[ "Apache-2.0" ]
35
2017-08-17T17:05:41.000Z
2022-03-28T12:22:14.000Z
from nighres.segmentation.conditional_shape import conditional_shape from nighres.segmentation.conditional_shape import conditional_shape_atlasing from nighres.segmentation.conditional_shape import conditional_shape_updating from nighres.segmentation.fuzzy_cmeans import fuzzy_cmeans
56.8
77
0.915493
34
284
7.352941
0.294118
0.384
0.368
0.408
0.732
0.732
0.732
0.732
0
0
0
0
0.056338
284
4
78
71
0.932836
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
1
0
1
1
1
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
10
642b886e639c184120547145e09719f51325cd03
1,893
py
Python
dogehouse/core/User.py
ItzAmirreza/dogehouse.py
16a9c54fdc83d6e188cbf2785e5ccb7e718822e1
[ "MIT" ]
2
2021-05-04T19:59:56.000Z
2021-05-05T00:28:36.000Z
dogehouse/core/User.py
ItzAmirreza/dogify.py
16a9c54fdc83d6e188cbf2785e5ccb7e718822e1
[ "MIT" ]
null
null
null
dogehouse/core/User.py
ItzAmirreza/dogify.py
16a9c54fdc83d6e188cbf2785e5ccb7e718822e1
[ "MIT" ]
null
null
null
import datetime class User: def __init__(self, data): self.id = data["id"] self.username = data["username"] self.avatarUrl = data["avatarUrl"] self.bannerUrl = data["bannerUrl"] self.bio = data["bio"] self.botOwnerId = data["botOwnerId"] self.currentRoomId = data["currentRoomId"] self.displayName = data["displayName"] self.followers = data["followsYou"] self.following = data["youAreFollowing"] self.blockedUsers = data["iBlockedThem"] try: self.lastonlinedatetime = datetime.datetime.strptime( f"{data['lastOnline'].split('T')[0]} {data['lastOnline'].split('T')[1].replace('Z', '')}", '%Y-%m-%d %H:%M:%S.%f') except: self.lastonlinedatetime = None self.followersCount = data["numFollowers"] self.followingCount = data["numFollowing"] self.roomPermissions = data["roomPermissions"] async def update(self, data): self.id = data["id"] self.username = data["username"] self.avatarUrl = data["avatarUrl"] self.bannerUrl = data["bannerUrl"] self.bio = data["bio"] self.botOwnerId = data["botOwnerId"] self.currentRoomId = data["currentRoomId"] self.displayName = data["displayName"] self.followers = data["followsYou"] self.following = data["youAreFollowing"] self.blockedUsers = data["iBlockedThem"] try: self.lastonlinedatetime = datetime.datetime.strptime( f"{data['lastOnline'].split('T')[0]} {data['lastOnline'].split('T')[1].replace('Z', '')}", '%Y-%m-%d %H:%M:%S.%f') except: self.lastonlinedatetime = None self.followersCount = data["numFollowers"] self.followingCount = data["numFollowing"] self.roomPermissions = data["roomPermissions"]
39.4375
130
0.593767
185
1,893
6.054054
0.254054
0.078571
0.067857
0.071429
0.960714
0.960714
0.960714
0.960714
0.960714
0.960714
0
0.002811
0.248283
1,893
47
131
40.276596
0.784259
0
0
0.904762
0
0.047619
0.260961
0.084522
0
0
0
0
0
1
0.02381
false
0
0.02381
0
0.071429
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
642e733ef283f98dc9dc2670ef7bf025f16c89ba
13,762
py
Python
tests/test_dyads_expectation.py
nicoloval/iterative_reconstruction
09fc440b75e8af1e8c52973b520309b6bdcc8142
[ "MIT" ]
null
null
null
tests/test_dyads_expectation.py
nicoloval/iterative_reconstruction
09fc440b75e8af1e8c52973b520309b6bdcc8142
[ "MIT" ]
null
null
null
tests/test_dyads_expectation.py
nicoloval/iterative_reconstruction
09fc440b75e8af1e8c52973b520309b6bdcc8142
[ "MIT" ]
null
null
null
'''Test function dyads_expectation calculation ''' import os import sys sys.path.append('../') import sample import numpy as np import scipy.sparse import unittest # test tool class MyTest(unittest.TestCase): def setUp(self): pass def test_dcm_dyads(self): A = np.array( [[0, 1, 1, 0, 0], [1, 0, 1, 0, 0], [0, 0, 0, 1, 0], [1, 1, 0, 0, 1], [0, 0, 1, 0, 0]] ) dyads_count = 1 dyads_analytical_expectation = 5 sol = np.array([1]*10) dyads_fun_expectation = sample.expected_dyads(sol, method='dcm') self.assertTrue(dyads_fun_expectation == dyads_analytical_expectation) def test_dcm_singles(self): A = np.array( [[0, 1, 1, 0, 0], [1, 0, 1, 0, 0], [0, 0, 0, 1, 0], [1, 1, 0, 0, 1], [0, 0, 1, 0, 0]] ) dyads_analytical_expectation = 5 sol = np.array([1]*10) dyads_fun_expectation = sample.expected_dyads(sol, method='dcm', t='singles') #print(dyads_fun_expectation) self.assertTrue(dyads_fun_expectation == dyads_analytical_expectation) def test_dcm_zeros(self): A = np.array( [[0, 1, 1, 0, 0], [1, 0, 1, 0, 0], [0, 0, 0, 1, 0], [1, 1, 0, 0, 1], [0, 0, 1, 0, 0]] ) dyads_analytical_expectation = 5 sol = np.array([1]*10) dyads_fun_expectation = sample.expected_dyads(sol, method='dcm', t='zeros') # print(dyads_fun_expectation) self.assertTrue(dyads_fun_expectation == dyads_analytical_expectation) def test_dcm_rd_dyads(self): A = np.array( [[0, 1, 1, 0, 0], [1, 0, 1, 0, 0], [0, 0, 0, 1, 0], [1, 1, 0, 0, 1], [0, 0, 1, 0, 0]] ) dyads_count = 1 dyads_analytical_expectation = 5 sol = np.array([1]*8) """ c = [2, 0, 0, 0] dyads_fun_expectation = sample.expected_dyads_dcm_rd(sol, c) print(dyads_fun_expectation) print(dyads_analytical_expectation) """ dyads_fun_expectation = sample.expected_dyads(sol, method='dcm_rd', A=A) #print(dyads_fun_expectation) #print(dyads_analytical_expectation) self.assertTrue(dyads_fun_expectation == dyads_analytical_expectation) """ def test_dcm_dyads_emseble_empirical_1(self): A = np.array( [[0, 1, 1, 0, 0], [1, 0, 1, 0, 0], [0, 0, 0, 1, 0], [1, 1, 0, 0, 1], [0, 0, 
1, 0, 0]] ) # solve the netrec problem sol, step, diff = sample.iterative_solver(A, max_steps = 300, eps = 0.01) # find the analytical expectation # dyads dyads_fun_expectation = sample.expected_dyads(sol, method='dcm') # singles singles_fun_expectation = sample.expected_dyads(sol, method='dcm', t='singles') # zeros zeros_fun_expectation = sample.expected_dyads(sol, method='dcm', t='zeros') # sample 100 networks (dcm method by default) s_dir = 'tmp' sample.ensemble_sampler(sol=sol, m=100, method='dcm', sample_dir=s_dir) # count empirical dyads for each sampled network files = os.listdir(s_dir) dyads_l = [] singles_l = [] zeros_l = [] for f in files: fA = scipy.sparse.load_npz(s_dir + '/' + f) dyads = sample.dyads_count(fA) dyads_l.append(dyads) singles = sample.singles_count(fA) singles_l.append(singles) zeros = sample.zeros_count(fA) zeros_l.append(zeros) # compute empirical average dyads_empirical_expectation = np.average(dyads_l) singles_empirical_expectation = np.average(singles_l) zeros_empirical_expectation = np.average(zeros_l) # debug print('Empirical test 1') print('dyads') print(dyads_fun_expectation) print(dyads_empirical_expectation) print('singles') print(singles_fun_expectation) print(singles_empirical_expectation) print('zeros') print(zeros_fun_expectation) print(zeros_empirical_expectation) # remove ensemble directory files = os.listdir(s_dir) for f in files: os.remove(s_dir + '/' + f) os.rmdir(s_dir) # testing self.assertTrue(np.allclose(dyads_fun_expectation, dyads_empirical_expectation, atol=1e-02, rtol=1e-02)) self.assertTrue(np.allclose(singles_fun_expectation, singles_empirical_expectation, atol=1e-02, rtol=1e-02)) self.assertTrue(np.allclose(zeros_fun_expectation, zeros_empirical_expectation, atol=1e-02, rtol=1e-02)) """ """ def test_dcm_dyads_emseble_empirical_2(self): A = np.array([[0, 1, 0, 0, 0, 0, 1], [1, 0, 0, 1, 0, 0, 0], [0, 0, 0, 1, 0, 0, 1], [0, 1, 0, 0, 1, 0, 1], [1, 1, 0, 0, 0, 0, 0], [0, 1, 0, 0, 1, 0, 0], [1, 0, 0, 0, 1, 0, 0]] 
) # solve the netrec problem sol, step, diff = sample.iterative_solver(A, max_steps = 300, eps = 0.01) # find the analytical expectation # dyads dyads_fun_expectation = sample.expected_dyads(sol, method='dcm') # singles singles_fun_expectation = sample.expected_dyads(sol, method='dcm', t='singles') # zeros zeros_fun_expectation = sample.expected_dyads(sol, method='dcm', t='zeros') # sample 100 networks (dcm method by default) s_dir = 'tmp' sample.ensemble_sampler(sol=sol, m=100, method='dcm', sample_dir=s_dir) # count empirical dyads for each sampled network files = os.listdir(s_dir) dyads_l = [] singles_l = [] zeros_l = [] for f in files: fA = scipy.sparse.load_npz(s_dir + '/' + f) dyads = sample.dyads_count(fA) dyads_l.append(dyads) singles = sample.singles_count(fA) singles_l.append(singles) zeros = sample.zeros_count(fA) zeros_l.append(zeros) # compute empirical average dyads_empirical_expectation = np.average(dyads_l) singles_empirical_expectation = np.average(singles_l) zeros_empirical_expectation = np.average(zeros_l) # debug print('Empirical test 2') print('dyads') print('analytical: {}'.format(dyads_fun_expectation)) print('Empirical : {}'.format(dyads_empirical_expectation)) print('singles') print(singles_fun_expectation) print(singles_empirical_expectation) print('zeros') print(zeros_fun_expectation) print(zeros_empirical_expectation) # remove ensemble directory files = os.listdir(s_dir) for f in files: os.remove(s_dir + '/' + f) os.rmdir(s_dir) # testing self.assertTrue(np.allclose(dyads_fun_expectation, dyads_empirical_expectation, atol=1e-02, rtol=1e-02)) self.assertTrue(np.allclose(singles_fun_expectation, singles_empirical_expectation, atol=1e-02, rtol=1e-02)) self.assertTrue(np.allclose(zeros_fun_expectation, zeros_empirical_expectation, atol=1e-02, rtol=1e-02)) """ def test_dcm_dyads_emseble_empirical(self): A = np.array([[0., 0., 0., 0., 0., 0., 0., 1., 0., 0., 0.], [0., 0., 0., 0., 1., 0., 0., 0., 0., 0., 0.], [0., 0., 0., 1., 0., 0., 0., 0., 
0., 0., 0.], [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], [0., 0., 0., 0., 0., 0., 0., 0., 1., 0., 1.], [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], [0., 0., 0., 0., 0., 1., 0., 0., 0., 0., 0.], [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], [0., 0., 1., 0., 0., 0., 0., 0., 0., 0., 0.], [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.]]) # solve the netrec problem sol, step, diff = sample.iterative_solver(A, max_steps = 300, eps = 0.01, method='dcm') # find the analytical expectation # dyads dyads_fun_expectation = sample.expected_dyads(sol, method='dcm') # singles singles_fun_expectation = sample.expected_dyads(sol, method='dcm', t='singles') # zeros zeros_fun_expectation = sample.expected_dyads(sol, method='dcm', t='zeros') # sample 100 networks (dcm method by default) s_dir = 'tmp' sample.ensemble_sampler(sol=sol, m=1000, method='dcm', sample_dir=s_dir) # count empirical dyads for each sampled network files = os.listdir(s_dir) dyads_l = [] singles_l = [] zeros_l = [] for f in files: fA = scipy.sparse.load_npz(s_dir + '/' + f) dyads = sample.dyads_count(fA) dyads_l.append(dyads) singles = sample.singles_count(fA) singles_l.append(singles) zeros = sample.zeros_count(fA) zeros_l.append(zeros) # compute empirical average dyads_empirical_expectation = np.average(dyads_l) singles_empirical_expectation = np.average(singles_l) zeros_empirical_expectation = np.average(zeros_l) # debug """ print('Empirical test DCM') print('dyads') print('analytical: {}'.format(dyads_fun_expectation)) print('Empirical : {}'.format(dyads_empirical_expectation)) print('singles') print(singles_fun_expectation) print(singles_empirical_expectation) print('zeros') print(zeros_fun_expectation) print(zeros_empirical_expectation) """ # remove ensemble directory files = os.listdir(s_dir) for f in files: os.remove(s_dir + '/' + f) os.rmdir(s_dir) # testing self.assertTrue(np.allclose(dyads_fun_expectation, dyads_empirical_expectation, atol=1e-01, 
rtol=1e-01)) self.assertTrue(np.allclose(singles_fun_expectation, singles_empirical_expectation, atol=1e-01, rtol=1e-01)) self.assertTrue(np.allclose(zeros_fun_expectation, zeros_empirical_expectation, atol=1e-01, rtol=1e-01)) def test_dcm_rd_dyads_emseble_empirical(self): A = np.array([[0., 0., 0., 0., 0., 0., 0., 1., 0., 0., 0.], [0., 0., 0., 0., 1., 0., 0., 0., 0., 0., 0.], [0., 0., 0., 1., 0., 0., 0., 0., 0., 0., 0.], [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], [0., 0., 0., 0., 0., 0., 0., 0., 1., 0., 1.], [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], [0., 0., 0., 0., 0., 1., 0., 0., 0., 0., 0.], [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], [0., 0., 1., 0., 0., 0., 0., 0., 0., 0., 0.], [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.]]) # solve the netrec problem sol, step, diff = sample.iterative_solver(A, max_steps = 300, eps = 0.01, method='dcm_rd') d = sample.scalability_classes(A, 'dcm_rd') sol_full = sample.rd2full(sol, d, 'dcm_rd') sol = sol_full # find the analytical expectation # dyads dyads_fun_expectation = sample.expected_dyads(sol, method='dcm') # singles singles_fun_expectation = sample.expected_dyads(sol, method='dcm', t='singles') # zeros zeros_fun_expectation = sample.expected_dyads(sol, method='dcm', t='zeros') # sample 100 networks (dcm method by default) s_dir = 'tmp' sample.ensemble_sampler(sol=sol, m=1000, method='dcm', sample_dir=s_dir) # count empirical dyads for each sampled network files = os.listdir(s_dir) dyads_l = [] singles_l = [] zeros_l = [] for f in files: fA = scipy.sparse.load_npz(s_dir + '/' + f) dyads = sample.dyads_count(fA) dyads_l.append(dyads) singles = sample.singles_count(fA) singles_l.append(singles) zeros = sample.zeros_count(fA) zeros_l.append(zeros) # compute empirical average dyads_empirical_expectation = np.average(dyads_l) singles_empirical_expectation = np.average(singles_l) zeros_empirical_expectation = np.average(zeros_l) # debug """ print('Empirical test DCM 
REDUCED') print('dyads') print('analytical: {}'.format(dyads_fun_expectation)) print('Empirical : {}'.format(dyads_empirical_expectation)) print('singles') print(singles_fun_expectation) print(singles_empirical_expectation) print('zeros') print(zeros_fun_expectation) print(zeros_empirical_expectation) """ # remove ensemble directory files = os.listdir(s_dir) for f in files: os.remove(s_dir + '/' + f) os.rmdir(s_dir) # testing self.assertTrue(np.allclose(dyads_fun_expectation, dyads_empirical_expectation, atol=1e-01, rtol=1e-01)) self.assertTrue(np.allclose(singles_fun_expectation, singles_empirical_expectation, atol=1e-01, rtol=1e-01)) self.assertTrue(np.allclose(zeros_fun_expectation, zeros_empirical_expectation, atol=1e-01, rtol=1e-01)) if __name__ == '__main__': unittest.main()
39.32
116
0.552536
1,778
13,762
4.079303
0.060742
0.075831
0.093065
0.110851
0.95395
0.95202
0.937405
0.936992
0.922515
0.914932
0
0.059151
0.30715
13,762
349
117
39.432665
0.701521
0.046796
0
0.786207
0
0
0.016674
0
0
0
0
0
0.068966
1
0.048276
false
0.006897
0.041379
0
0.096552
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
ff4f8a3c3af29408e48e1d3224f34e2bed4e06f8
16,864
py
Python
cohesivenet/api/vns3ms/user_api.py
cohesive/python-cohesivenet-sdk
5620acfa669ff97c94d9aa04a16facda37d648c1
[ "MIT" ]
null
null
null
cohesivenet/api/vns3ms/user_api.py
cohesive/python-cohesivenet-sdk
5620acfa669ff97c94d9aa04a16facda37d648c1
[ "MIT" ]
null
null
null
cohesivenet/api/vns3ms/user_api.py
cohesive/python-cohesivenet-sdk
5620acfa669ff97c94d9aa04a16facda37d648c1
[ "MIT" ]
null
null
null
# coding: utf-8 """ VNS3:ms API Cohesive networks VNS3 provides complete control of your network's addresses, routes, rules and edge. Networking does # noqa: E501 Contact: solutions@cohesive.net Generated by: https://openapi-generator.tech """ from __future__ import absolute_import import re # noqa: F401 from cohesivenet.api_builder import VersionRouter def put_user_password( api_client, password=None, force_refresh=None, **kwargs ): # noqa: E501 """Update current user password # noqa: E501 Update current user password # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> response = await api.put_user_password(password, async_req=True) :param VNS3Client api_client: (required) :param password str: New password (required) :param force_refresh bool: Force session refresh :param enabled bool: Enable/Disable API key :param async_req bool: execute request asynchronously :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
:return: APIResponse or awaitable if async """ local_var_params = locals() request_params = ["password", "force_refresh"] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = {} for param in [p for p in request_params if local_var_params.get(p) is not None]: body_params[param] = local_var_params[param] # HTTP header `Accept` header_params["Accept"] = api_client.select_header_accept( ["application/json"] ) # noqa: E501 # HTTP header `Content-Type` header_params["Content-Type"] = api_client.select_header_content_type( # noqa: E501 ["application/json"] ) # noqa: E501 # Authentication setting auth_settings = ["ApiTokenAuth", "basicAuth"] # noqa: E501 return api_client.call_api( "/user/password", "PUT", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type="object", # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get("async_req"), _return_http_data_only=local_var_params.get( "_return_http_data_only" ), # noqa: E501 _preload_content=local_var_params.get("_preload_content", True), _request_timeout=local_var_params.get("_request_timeout"), collection_formats=collection_formats, ) def get_user_credentials(api_client, cred_id=None, fields=True, **kwargs): # noqa: E501 """Get user credentials # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> response = await api.get_user_credentials(async_req=True) :param VNS3Client api_client: (required) :param cred_id int: Filter by credential ID :param fields bool: Return user credential fields in response :param async_req bool: execute request asynchronously :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. 
If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: APIResponse or awaitable if async """ local_var_params = locals() request_params = ["cred_id", "fields"] collection_formats = {} path_params = {} query_params = [] for param in [p for p in request_params if local_var_params.get(p) is not None]: query_params.append((param, local_var_params[param])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = {} # HTTP header `Accept` header_params["Accept"] = api_client.select_header_accept( ["application/json"] ) # noqa: E501 # Authentication setting auth_settings = ["ApiTokenAuth", "basicAuth"] # noqa: E501 return api_client.call_api( "/user/credentials", "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type="object", # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get("async_req"), _return_http_data_only=local_var_params.get( "_return_http_data_only" ), # noqa: E501 _preload_content=local_var_params.get("_preload_content", True), _request_timeout=local_var_params.get("_request_timeout"), collection_formats=collection_formats, ) def post_create_user_credential( api_client, name=None, code=None, fields=None, **kwargs ): # noqa: E501 """Create user credential # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> response = await api.post_create_user_credential(async_req=True) :param VNS3Client api_client: (required) :param name str: Name of new credential (required) :param code str: Credential type code (required) :param fields list[{key: str, value: str}]: Credential type code :param async_req bool: execute request asynchronously :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. 
:param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: APIResponse or awaitable if async """ local_var_params = locals() request_params = ["name", "code", "fields"] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} # this is a little annoyance yet to be fixed in VNS3:ms (2.3.5) # expecting this field on ec2 codes body_params = { "fields": [{"key": "Use VNS3:ms IAM Role (Recommended Option)", "value": False}] } for param in [p for p in request_params if local_var_params.get(p) is not None]: body_params[param] = local_var_params[param] # HTTP header `Accept` header_params["Accept"] = api_client.select_header_accept( ["application/json"] ) # noqa: E501 # HTTP header `Content-Type` header_params["Content-Type"] = api_client.select_header_content_type( # noqa: E501 ["application/json"] ) # noqa: E501 # Authentication setting auth_settings = ["ApiTokenAuth", "basicAuth"] # noqa: E501 return api_client.call_api( "/user/credentials", "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type="object", # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get("async_req"), _return_http_data_only=local_var_params.get( "_return_http_data_only" ), # noqa: E501 _preload_content=local_var_params.get("_preload_content", True), _request_timeout=local_var_params.get("_request_timeout"), collection_formats=collection_formats, ) def post_create_ec2_credential( api_client, name=None, access_key=None, secret_key=None, gov_cloud=None, **kwargs ): # noqa: E501 """Create user credential # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> response = await api.post_create_user_credential(async_req=True) :param VNS3Client api_client: (required) :param name str: Name of new credential (required) :param access_key str: Access key (required) :param secret_key str: Secret key (required) :param gov_cloud bool: True if for Gov Cloud :param fields list[{key: str, value: str}]: Credential type code :param async_req bool: execute request asynchronously :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: APIResponse or awaitable if async """ local_var_params = locals() request_params = ["name", "access_key", "secret_key", "gov_cloud"] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} fields = [ {"key": "Access Key", "value": access_key}, {"key": "Secret Key", "value": secret_key}, {"key": "Use VNS3:ms IAM Role (Recommended Option)", "value": False}, {"key": "GovCloud", "value": gov_cloud if gov_cloud is not None else False}, ] body_params = {"name": name, "code": "ec2", "fields": fields} # HTTP header `Accept` header_params["Accept"] = api_client.select_header_accept( ["application/json"] ) # noqa: E501 # HTTP header `Content-Type` header_params["Content-Type"] = api_client.select_header_content_type( # noqa: E501 ["application/json"] ) # noqa: E501 # Authentication setting auth_settings = ["ApiTokenAuth", "basicAuth"] # noqa: E501 return api_client.call_api( "/user/credentials", "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type="object", # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get("async_req"), 
_return_http_data_only=local_var_params.get( "_return_http_data_only" ), # noqa: E501 _preload_content=local_var_params.get("_preload_content", True), _request_timeout=local_var_params.get("_request_timeout"), collection_formats=collection_formats, ) def put_update_user_credential( api_client, credential_id, name=None, code=None, fields=None, **kwargs ): # noqa: E501 """Update user credential # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> response = await api.put_update_user_credential(id, async_req=True) :param VNS3Client api_client: (required) :param credential_id int: Credential ID (required) :param name str: Name of new credential :param code str: Credential type code :param fields list[{key: str, value: str}]: Credential type code :param async_req bool: execute request asynchronously :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
:return: APIResponse or awaitable if async """ local_var_params = locals() request_params = ["name", "code", "fields"] collection_formats = {} path_params = {"credential_id": credential_id} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = {} for param in [p for p in request_params if local_var_params.get(p) is not None]: body_params[param] = local_var_params[param] # HTTP header `Accept` header_params["Accept"] = api_client.select_header_accept( ["application/json"] ) # noqa: E501 # HTTP header `Content-Type` header_params["Content-Type"] = api_client.select_header_content_type( # noqa: E501 ["application/json"] ) # noqa: E501 # Authentication setting auth_settings = ["ApiTokenAuth", "basicAuth"] # noqa: E501 return api_client.call_api( "/user/credentials/{credential_id}", "PUT", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type="object", # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get("async_req"), _return_http_data_only=local_var_params.get( "_return_http_data_only" ), # noqa: E501 _preload_content=local_var_params.get("_preload_content", True), _request_timeout=local_var_params.get("_request_timeout"), collection_formats=collection_formats, ) def delete_user_credential(api_client, credential_id, **kwargs): # noqa: E501 """Delete user credential # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> response = await api.delete_user_credential(id, async_req=True) :param VNS3Client api_client: (required) :param credential_id int: Credential ID (required) :param async_req bool: execute request asynchronously :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. 
If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: APIResponse or awaitable if async """ local_var_params = locals() request_params = [] collection_formats = {} path_params = {"credential_id": credential_id} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = {} # HTTP header `Accept` header_params["Accept"] = api_client.select_header_accept( ["application/json"] ) # noqa: E501 # Authentication setting auth_settings = ["ApiTokenAuth", "basicAuth"] # noqa: E501 return api_client.call_api( "/user/credentials/{credential_id}", "DELETE", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type="object", # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get("async_req"), _return_http_data_only=local_var_params.get( "_return_http_data_only" ), # noqa: E501 _preload_content=local_var_params.get("_preload_content", True), _request_timeout=local_var_params.get("_request_timeout"), collection_formats=collection_formats, ) class UserApiRouter(VersionRouter): """User management endpoints for configuring credentials""" function_library = { "put_user_password": {"2.1.1-2.5.4": put_user_password}, "get_user_credentials": {"2.1.1-2.5.4": get_user_credentials}, "post_create_user_credential": {"2.1.1-2.5.4": post_create_user_credential}, "post_create_ec2_credential": {"2.1.1-2.5.4": post_create_ec2_credential}, "put_update_user_credential": {"2.1.1-2.5.4": put_update_user_credential}, "delete_user_credential": {"2.1.1-2.5.4": delete_user_credential}, }
34.987552
135
0.644746
2,006
16,864
5.153041
0.097208
0.038696
0.051466
0.046048
0.863016
0.854697
0.842024
0.837187
0.82403
0.803521
0
0.016606
0.264409
16,864
481
136
35.060291
0.816687
0.421905
0
0.772358
0
0
0.15585
0.032451
0
0
0
0
0
1
0.02439
false
0.020325
0.012195
0
0.069106
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
44662ca086f5763cf996cb74c5a90b037204d98c
41
py
Python
cutout/__init__.py
joaopaulomcc/cutout
16b0c201c7074f05159483e09eb7df37a20369ce
[ "MIT" ]
null
null
null
cutout/__init__.py
joaopaulomcc/cutout
16b0c201c7074f05159483e09eb7df37a20369ce
[ "MIT" ]
null
null
null
cutout/__init__.py
joaopaulomcc/cutout
16b0c201c7074f05159483e09eb7df37a20369ce
[ "MIT" ]
null
null
null
from . import colors from . import shapes
20.5
20
0.780488
6
41
5.333333
0.666667
0.625
0
0
0
0
0
0
0
0
0
0
0.170732
41
2
21
20.5
0.941176
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
2b8e1dbdccdcde13d28c18d9a68f871eac852784
7,991
py
Python
test/test_packages_api.py
cvent/octopus-deploy-api-client
0e03e842e1beb29b132776aee077df570b88366a
[ "Apache-2.0" ]
null
null
null
test/test_packages_api.py
cvent/octopus-deploy-api-client
0e03e842e1beb29b132776aee077df570b88366a
[ "Apache-2.0" ]
null
null
null
test/test_packages_api.py
cvent/octopus-deploy-api-client
0e03e842e1beb29b132776aee077df570b88366a
[ "Apache-2.0" ]
null
null
null
# coding: utf-8 """ Octopus Server API No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501 OpenAPI spec version: 2019.6.7+Branch.tags-2019.6.7.Sha.aa18dc6809953218c66f57eff7d26481d9b23d6a Generated by: https://github.com/swagger-api/swagger-codegen.git """ from __future__ import absolute_import import unittest import octopus_deploy_swagger_client from octopus_deploy_client.packages_api import PackagesApi # noqa: E501 from octopus_deploy_swagger_client.rest import ApiException class TestPackagesApi(unittest.TestCase): """PackagesApi unit test stubs""" def setUp(self): self.api = octopus_deploy_client.packages_api.PackagesApi() # noqa: E501 def tearDown(self): pass def test_custom_action_response_descriptor_octopus_server_web_api_actions_package_list_notes_action(self): """Test case for custom_action_response_descriptor_octopus_server_web_api_actions_package_list_notes_action """ pass def test_custom_action_response_descriptor_octopus_server_web_api_actions_package_list_notes_action_spaces(self): """Test case for custom_action_response_descriptor_octopus_server_web_api_actions_package_list_notes_action_spaces """ pass def test_custom_action_response_descriptor_octopus_server_web_api_actions_package_notes_action(self): """Test case for custom_action_response_descriptor_octopus_server_web_api_actions_package_notes_action """ pass def test_custom_action_response_descriptor_octopus_server_web_api_actions_package_notes_action_spaces(self): """Test case for custom_action_response_descriptor_octopus_server_web_api_actions_package_notes_action_spaces """ pass def test_custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_delete_action(self): """Test case for custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_delete_action """ pass def 
test_custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_delete_action_0(self): """Test case for custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_delete_action_0 """ pass def test_custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_delete_action_spaces(self): """Test case for custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_delete_action_spaces """ pass def test_custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_delete_action_spaces_0(self): """Test case for custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_delete_action_spaces_0 """ pass def test_custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_delta_signature_action(self): """Test case for custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_delta_signature_action """ pass def test_custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_delta_signature_action_spaces(self): """Test case for custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_delta_signature_action_spaces """ pass def test_custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_delta_upload_action(self): """Test case for custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_delta_upload_action """ pass def test_custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_delta_upload_action_spaces(self): """Test case for 
custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_delta_upload_action_spaces """ pass def test_custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_get_action(self): """Test case for custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_get_action """ pass def test_custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_get_action_spaces(self): """Test case for custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_get_action_spaces """ pass def test_custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_get_raw_action(self): """Test case for custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_get_raw_action """ pass def test_custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_get_raw_action_spaces(self): """Test case for custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_get_raw_action_spaces """ pass def test_custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_list_action(self): """Test case for custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_list_action """ pass def test_custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_list_action_spaces(self): """Test case for custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_list_action_spaces """ pass def test_custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_upload_action(self): """Test case for 
custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_upload_action """ pass def test_custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_upload_action_spaces(self): """Test case for custom_action_response_descriptor_octopus_server_web_api_actions_package_repository_package_repository_upload_action_spaces """ pass def test_custom_action_response_descriptor_octopus_server_web_api_actions_package_search_action_old(self): """Test case for custom_action_response_descriptor_octopus_server_web_api_actions_package_search_action_old """ pass def test_custom_action_response_descriptor_octopus_server_web_api_actions_package_search_action_old_spaces(self): """Test case for custom_action_response_descriptor_octopus_server_web_api_actions_package_search_action_old_spaces """ pass def test_custom_action_response_descriptor_octopus_server_web_api_actions_package_version_search_action(self): """Test case for custom_action_response_descriptor_octopus_server_web_api_actions_package_version_search_action """ pass def test_custom_action_response_descriptor_octopus_server_web_api_actions_package_version_search_action_spaces(self): """Test case for custom_action_response_descriptor_octopus_server_web_api_actions_package_version_search_action_spaces """ pass if __name__ == '__main__': unittest.main()
44.642458
157
0.832186
997
7,991
5.942828
0.076229
0.183629
0.162025
0.243038
0.920169
0.910042
0.910042
0.897215
0.897215
0.897215
0
0.00749
0.131148
7,991
178
158
44.893258
0.845888
0.456514
0
0.416667
1
0
0.00191
0
0
0
0
0
0
1
0.433333
false
0.416667
0.083333
0
0.533333
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
11
2babba2c0b73e281483b0129c2bb8257be15517f
207
py
Python
quik/base.py
nusov/QuikPython
d992b9d5aaf68cdda3031a08705221fe461a7780
[ "MIT" ]
null
null
null
quik/base.py
nusov/QuikPython
d992b9d5aaf68cdda3031a08705221fe461a7780
[ "MIT" ]
null
null
null
quik/base.py
nusov/QuikPython
d992b9d5aaf68cdda3031a08705221fe461a7780
[ "MIT" ]
null
null
null
class QuikNamespace(object):
    """Minimal holder binding a group of operations to a Quik connection."""

    def __init__(self, quik):
        # Keep a reference to the owning Quik client.
        self.quik = quik


class QuikNamespaceWithArg(object):
    """Holder like :class:`QuikNamespace` that also carries a name argument."""

    def __init__(self, quik, name):
        # Store the owning client and the namespace-specific name together.
        self.quik, self.name = quik, name
23
35
0.642512
24
207
5.208333
0.375
0.256
0.208
0.272
0.336
0
0
0
0
0
0
0
0.256039
207
9
36
23
0.811688
0
0
0.285714
0
0
0
0
0
0
0
0
0
1
0.285714
false
0
0
0
0.571429
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
7
2be5b04371fab23b1a9f7340c4e0ed210f579570
24,762
py
Python
skdecide/hub/domain/rcpsp/rcpsp_sk.py
emilienDespres/scikit-decide
2a3dd2d93e5e6d07984e1bc02b6e969261aeefbc
[ "MIT" ]
27
2020-11-23T11:45:31.000Z
2022-03-22T08:08:00.000Z
skdecide/hub/domain/rcpsp/rcpsp_sk.py
emilienDespres/scikit-decide
2a3dd2d93e5e6d07984e1bc02b6e969261aeefbc
[ "MIT" ]
94
2021-02-24T09:50:23.000Z
2022-02-27T10:07:15.000Z
skdecide/hub/domain/rcpsp/rcpsp_sk.py
emilienDespres/scikit-decide
2a3dd2d93e5e6d07984e1bc02b6e969261aeefbc
[ "MIT" ]
12
2020-12-08T10:38:26.000Z
2021-10-01T09:17:04.000Z
# Copyright (c) AIRBUS and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import annotations

from typing import Any, Dict, List, Optional, Set, Union

from skdecide import DiscreteDistribution, Distribution
from skdecide.builders.domain.scheduling.modes import (
    ConstantModeConsumption,
    ModeConsumption,
)
from skdecide.builders.domain.scheduling.scheduling_domains import (
    MultiModeMultiSkillRCPSP,
    MultiModeMultiSkillRCPSPCalendar,
    MultiModeRCPSP,
    MultiModeRCPSP_Stochastic_Durations,
    MultiModeRCPSPCalendar,
    MultiModeRCPSPCalendar_Stochastic_Durations,
    SchedulingObjectiveEnum,
    SingleMode,
)

# Concrete RCPSP domain wrappers over the skdecide scheduling base classes.
# Each `D` alias below pins the base-class combination for the domain that
# immediately follows it (the framework convention used throughout this file).


class D(MultiModeRCPSP):
    pass


class MRCPSP(D):
    """Multi-mode RCPSP domain with constant (time-independent) resource availability."""

    def __init__(
        self,
        resource_names: List[str] = None,
        task_ids: List[int] = None,
        tasks_mode: Dict[int, Dict[int, Dict[str, int]]] = None,
        successors: Dict[int, List[int]] = None,
        max_horizon: int = None,
        resource_availability: Dict[str, int] = None,
        resource_renewable: Dict[str, bool] = None,
    ):
        # tasks_mode maps task -> mode -> {resource: need, "duration": d}.
        self.resource_names = resource_names
        self.task_ids = task_ids
        self.tasks_mode = tasks_mode
        # transform the "mode_details" dict that we largely used in DO in the good format.
        # task_mode_dict: task -> mode -> ConstantModeConsumption (resource needs only);
        # duration_dict: task -> mode -> int duration, split out of tasks_mode.
        self.task_mode_dict = {}
        self.duration_dict = {}
        for task in self.tasks_mode:
            self.task_mode_dict[task] = {}
            self.duration_dict[task] = {}
            for mode in self.tasks_mode[task]:
                self.task_mode_dict[task][mode] = ConstantModeConsumption({})
                for r in self.tasks_mode[task][mode]:
                    # Only keys that are known resources are copied; the
                    # "duration" key (and anything else) is filtered out here.
                    if r in self.resource_names:
                        self.task_mode_dict[task][mode].mode_details[r] = [
                            self.tasks_mode[task][mode][r]
                        ]
                self.duration_dict[task][mode] = self.tasks_mode[task][mode]["duration"]
        self.successors = successors
        self.max_horizon = max_horizon
        self.resource_availability = resource_availability
        self.resource_renewable = resource_renewable
        self.initialize_domain()

    def _get_tasks_modes(self) -> Dict[int, Dict[int, ModeConsumption]]:
        return self.task_mode_dict

    def _get_resource_renewability(self) -> Dict[str, bool]:
        return self.resource_renewable

    def _get_max_horizon(self) -> int:
        return self.max_horizon

    def _get_successors(self) -> Dict[int, List[int]]:
        return self.successors

    def _get_tasks_ids(self) -> Union[Set[int], Dict[int, Any], List[int]]:
        return self.task_ids

    def _get_task_duration(
        self, task: int, mode: Optional[int] = 1, progress_from: Optional[float] = 0.0
    ) -> int:
        return self.duration_dict[task][mode]

    def _get_original_quantity_resource(self, resource: str, **kwargs) -> int:
        return self.resource_availability[resource]

    def _get_resource_types_names(self) -> List[str]:
        return self.resource_names

    def _get_objectives(self) -> List[int]:
        return [SchedulingObjectiveEnum.MAKESPAN]


class D(MultiModeRCPSPCalendar):
    pass


class MRCPSPCalendar(D):
    """Multi-mode RCPSP with calendar (per-time-step) resource availability."""

    def _get_quantity_resource(self, resource: str, time: int, **kwargs) -> int:
        return self.resource_availability[resource][time]

    def __init__(
        self,
        resource_names: List[str] = None,
        task_ids: List[int] = None,
        tasks_mode: Dict[int, Dict[int, Dict[str, int]]] = None,
        successors: Dict[int, List[int]] = None,
        max_horizon: int = None,
        resource_availability: Dict[str, List[int]] = None,
        resource_renewable: Dict[str, bool] = None,
    ):
        self.resource_names = resource_names
        self.task_ids = task_ids
        self.tasks_mode = tasks_mode
        # transform the "mode_details" dict that we largely used in DO in the good format.
        # Same tasks_mode -> (task_mode_dict, duration_dict) split as MRCPSP.__init__.
        self.task_mode_dict = {}
        self.duration_dict = {}
        for task in self.tasks_mode:
            self.task_mode_dict[task] = {}
            self.duration_dict[task] = {}
            for mode in self.tasks_mode[task]:
                self.task_mode_dict[task][mode] = ConstantModeConsumption({})
                for r in self.tasks_mode[task][mode]:
                    if r in self.resource_names:
                        self.task_mode_dict[task][mode].mode_details[r] = [
                            self.tasks_mode[task][mode][r]
                        ]
                self.duration_dict[task][mode] = self.tasks_mode[task][mode]["duration"]
        self.successors = successors
        self.max_horizon = max_horizon
        self.resource_availability = resource_availability
        # "Original" quantity = peak availability over the calendar for each resource.
        self.original_resource_availability = {
            r: max(self.resource_availability[r]) for r in self.resource_availability
        }
        self.resource_renewable = resource_renewable
        self.initialize_domain()

    def _get_tasks_modes(self) -> Dict[int, Dict[int, ModeConsumption]]:
        return self.task_mode_dict

    def _get_resource_renewability(self) -> Dict[str, bool]:
        return self.resource_renewable

    def _get_max_horizon(self) -> int:
        return self.max_horizon

    def _get_successors(self) -> Dict[int, List[int]]:
        return self.successors

    def _get_tasks_ids(self) -> Union[Set[int], Dict[int, Any], List[int]]:
        return self.task_ids

    def _get_task_duration(
        self, task: int, mode: Optional[int] = 1, progress_from: Optional[float] = 0.0
    ) -> int:
        return self.duration_dict[task][mode]

    def _get_original_quantity_resource(self, resource: str, **kwargs) -> int:
        return self.original_resource_availability[resource]

    def _get_resource_types_names(self) -> List[str]:
        return self.resource_names

    def _get_objectives(self) -> List[int]:
        return [SchedulingObjectiveEnum.MAKESPAN]


class RCPSP(MRCPSP, SingleMode):
    """Single-mode RCPSP: an MRCPSP restricted to mode 1 of every task."""

    def __init__(
        self,
        resource_names: List[str] = None,
        task_ids: List[int] = None,
        tasks_mode: Dict[int, Dict[int, Dict[str, int]]] = None,
        successors: Dict[int, List[int]] = None,
        max_horizon: int = None,
        resource_availability: Dict[str, int] = None,
        resource_renewable: Dict[str, bool] = None,
    ):
        MRCPSP.__init__(
            self,
            resource_names=resource_names,
            task_ids=task_ids,
            tasks_mode=tasks_mode,
            successors=successors,
            max_horizon=max_horizon,
            resource_availability=resource_availability,
            resource_renewable=resource_renewable,
        )
        # Single-mode view: keep only mode 1 of each task.
        self.tasks_modes_rcpsp = {
            t: self.task_mode_dict[t][1] for t in self.task_mode_dict
        }

    def _get_tasks_mode(self) -> Dict[int, ModeConsumption]:
        return self.tasks_modes_rcpsp


class RCPSPCalendar(MRCPSPCalendar, SingleMode):
    """Single-mode RCPSP with calendar availability (mode 1 of each task)."""

    def __init__(
        self,
        resource_names: List[str] = None,
        task_ids: List[int] = None,
        tasks_mode: Dict[int, Dict[int, Dict[str, int]]] = None,
        successors: Dict[int, List[int]] = None,
        max_horizon: int = None,
        resource_availability: Dict[str, List[int]] = None,
        resource_renewable: Dict[str, bool] = None,
    ):
        MRCPSPCalendar.__init__(
            self,
            resource_names=resource_names,
            task_ids=task_ids,
            tasks_mode=tasks_mode,
            successors=successors,
            max_horizon=max_horizon,
            resource_availability=resource_availability,
            resource_renewable=resource_renewable,
        )
        # Single-mode view: keep only mode 1 of each task.
        self.tasks_modes_rcpsp = {
            t: self.task_mode_dict[t][1] for t in self.task_mode_dict
        }

    def _get_tasks_mode(self) -> Dict[int, ModeConsumption]:
        return self.tasks_modes_rcpsp


class Stochastic_RCPSP(MultiModeRCPSP_Stochastic_Durations):
    """Multi-mode RCPSP whose task durations are given as distributions."""

    def _get_max_horizon(self) -> int:
        return self.max_horizon

    def _get_objectives(self) -> List[SchedulingObjectiveEnum]:
        return [SchedulingObjectiveEnum.MAKESPAN]

    def _get_successors(self) -> Dict[int, List[int]]:
        return self.successors

    def _get_tasks_ids(self) -> Union[Set[int], Dict[int, Any], List[int]]:
        return self.task_ids

    def _get_tasks_modes(self) -> Dict[int, Dict[int, ModeConsumption]]:
        return self.task_mode_dict

    def _get_task_duration_distribution(
        self,
        task: int,
        mode: Optional[int] = 1,
        progress_from: Optional[float] = 0.0,
        multivariate_settings: Optional[Dict[str, int]] = None,
    ) -> Distribution:
        return self.duration_distribution[task][mode]

    def _get_original_quantity_resource(self, resource: str, **kwargs) -> int:
        return self.resource_availability[resource]

    def _get_resource_types_names(self) -> List[str]:
        return self.resource_names

    def __init__(
        self,
        resource_names: List[str] = None,
        task_ids: List[int] = None,
        tasks_mode: Dict[int, Dict[int, ModeConsumption]] = None,  # ressource
        duration_distribution: Dict[int, Dict[int, DiscreteDistribution]] = None,
        successors: Dict[int, List[int]] = None,
        max_horizon: int = None,
        resource_availability: Dict[str, int] = None,
        resource_renewable: Dict[str, bool] = None,
    ):
        # NOTE: unlike the deterministic domains above, tasks_mode here is
        # already a mapping to ModeConsumption objects, so no transform loop.
        self.resource_names = resource_names
        self.task_ids = task_ids
        self.tasks_mode = tasks_mode
        # transform the "mode_details" dict that we largely used in DO in the good format.
        self.task_mode_dict = self.tasks_mode
        self.duration_distribution = duration_distribution
        self.successors = successors
        self.max_horizon = max_horizon
        self.resource_availability = resource_availability
        self.resource_renewable = resource_renewable
        self.initialize_domain()


def build_stochastic_from_deterministic(rcpsp: MRCPSP, task_to_noise: Set[int] = None):
    """Build a Stochastic_RCPSP from a deterministic MRCPSP by adding
    uniform noise (+/-10 around each duration, floored at 1) to the tasks in
    ``task_to_noise`` (all tasks when None). Zero-duration tasks keep a
    point distribution; the horizon is doubled to absorb longer samples."""
    if task_to_noise is None:
        task_to_noise = set(rcpsp.get_tasks_ids())
    duration_distribution = {}
    for task_id in rcpsp.get_tasks_ids():
        duration_distribution[task_id] = {}
        for mode in rcpsp.get_task_modes(task_id=task_id):
            duration = rcpsp.get_task_duration(task=task_id, mode=mode)
            if duration == 0 or task_id not in task_to_noise:
                # Deterministic point distribution for fixed tasks.
                distrib = DiscreteDistribution(values=[(duration, 1)])
            else:
                # Uniform over [duration-n, duration+n], clipped below at 1.
                n = 10
                distrib = DiscreteDistribution(
                    values=[
                        (max(1, duration + i), 1 / (2 * n + 1))
                        for i in range(-n, n + 1)
                    ]
                )
            duration_distribution[task_id][mode] = distrib
    return Stochastic_RCPSP(
        resource_names=rcpsp.get_resource_types_names(),
        task_ids=rcpsp.get_tasks_ids(),
        tasks_mode=rcpsp.get_tasks_modes(),  # ressource
        duration_distribution=duration_distribution,
        successors=rcpsp.successors,
        max_horizon=rcpsp.max_horizon * 2,
        resource_availability=rcpsp.resource_availability,
        resource_renewable=rcpsp.resource_renewable,
    )


def build_n_determinist_from_stochastic(srcpsp: Stochastic_RCPSP, nb_instance: int):
    """Sample ``nb_instance`` deterministic MRCPSP instances from a
    Stochastic_RCPSP by drawing one duration per (task, mode) per instance."""
    instances = []
    for i in range(nb_instance):
        modes = srcpsp.get_tasks_modes()
        # Rebuild the raw task -> mode -> {resource: need} dict expected by MRCPSP.
        modes_for_rcpsp = {
            task: {
                mode: {
                    r: modes[task][mode].get_resource_need_at_time(r, 0)
                    for r in modes[task][mode].get_ressource_names()
                }
                for mode in modes[task]
            }
            for task in modes
        }
        for t in modes_for_rcpsp:
            for m in modes_for_rcpsp[t]:
                # One random draw per (task, mode) per generated instance.
                duration = srcpsp.sample_task_duration(task=t, mode=m)
                modes_for_rcpsp[t][m]["duration"] = duration
        resource_availability_dict = {}
        for r in srcpsp.get_resource_types_names():
            resource_availability_dict[r] = srcpsp.get_original_quantity_resource(r)
        instances += [
            MRCPSP(
                resource_names=srcpsp.get_resource_types_names(),
                task_ids=srcpsp.get_tasks_ids(),
                tasks_mode=modes_for_rcpsp,  # ressource
                successors=srcpsp.successors,
                # max_horizon=srcpsp.max_horizon,
                max_horizon=srcpsp.get_max_horizon(),
                # resource_availability=srcpsp.resource_availability,
                resource_availability=resource_availability_dict,
                resource_renewable=srcpsp.get_resource_renewability()
                # resource_renewable=srcpsp.resource_renewable
            )
        ]
    return instances


class D(MultiModeRCPSPCalendar_Stochastic_Durations):
    pass


class SMRCPSPCalendar(D):
    """Stochastic-duration multi-mode RCPSP with calendar availability."""

    def _get_task_duration_distribution(
        self,
        task: int,
        mode: Optional[int] = 1,
        progress_from: Optional[float] = 0.0,
        multivariate_settings: Optional[Dict[str, int]] = None,
    ) -> Distribution:
        return self.duration_distribution[task][mode]

    def _get_quantity_resource(self, resource: str, time: int, **kwargs) -> int:
        return self.resource_availability[resource][time]

    def __init__(
        self,
        resource_names: List[str] = None,
        task_ids: List[int] = None,
        tasks_mode: Dict[int, Dict[int, Dict[str, int]]] = None,
        successors: Dict[int, List[int]] = None,
        duration_distribution: Dict[int, Dict[int, DiscreteDistribution]] = None,
        max_horizon: int = None,
        resource_availability: Dict[str, List[int]] = None,
        resource_renewable: Dict[str, bool] = None,
    ):
        self.resource_names = resource_names
        self.task_ids = task_ids
        self.tasks_mode = tasks_mode
        # transform the "mode_details" dict that we largely used in DO in the good format.
        self.task_mode_dict = {}
        self.duration_dict = {}
        for task in self.tasks_mode:
            self.task_mode_dict[task] = {}
            self.duration_dict[task] = {}
            for mode in self.tasks_mode[task]:
                self.task_mode_dict[task][mode] = ConstantModeConsumption({})
                for r in self.tasks_mode[task][mode]:
                    if r in self.resource_names:
                        self.task_mode_dict[task][mode].mode_details[r] = [
                            self.tasks_mode[task][mode][r]
                        ]
                self.duration_dict[task][mode] = self.tasks_mode[task][mode]["duration"]
        self.successors = successors
        self.max_horizon = max_horizon
        self.resource_availability = resource_availability
        self.resource_renewable = resource_renewable
        self.duration_distribution = duration_distribution
        # NOTE(review): unlike MRCPSPCalendar.__init__, this constructor never
        # sets self.original_resource_availability, yet
        # _get_original_quantity_resource below reads it — looks like a latent
        # AttributeError; confirm against the base-class contract.
        self.initialize_domain()

    def _get_tasks_modes(self) -> Dict[int, Dict[int, ModeConsumption]]:
        return self.task_mode_dict

    def _get_resource_renewability(self) -> Dict[str, bool]:
        return self.resource_renewable

    def _get_max_horizon(self) -> int:
        return self.max_horizon

    def _get_successors(self) -> Dict[int, List[int]]:
        return self.successors

    def _get_tasks_ids(self) -> Union[Set[int], Dict[int, Any], List[int]]:
        return self.task_ids

    def _get_task_duration(
        self, task: int, mode: Optional[int] = 1, progress_from: Optional[float] = 0.0
    ) -> int:
        return self.duration_dict[task][mode]

    def _get_original_quantity_resource(self, resource: str, **kwargs) -> int:
        # return self.resource_availability[resource]
        return self.original_resource_availability[resource]

    def _get_resource_types_names(self) -> List[str]:
        return self.resource_names

    def _get_objectives(self) -> List[int]:
        return [SchedulingObjectiveEnum.MAKESPAN]


class D(MultiModeMultiSkillRCPSP):
    pass


class MSRCPSP(D):
    """Multi-skill, multi-mode RCPSP with constant resource availability."""

    def __init__(
        self,
        skills_names: List[str] = None,
        resource_unit_names: List[str] = None,
        resource_type_names: List[str] = None,
        resource_skills: Dict[str, Dict[str, Any]] = None,
        task_ids: List[int] = None,
        tasks_mode: Dict[int, Dict[int, Dict[str, int]]] = None,
        successors: Dict[int, List[int]] = None,
        max_horizon: int = None,
        resource_availability: Dict[str, int] = None,
        resource_renewable: Dict[str, bool] = None,
    ):
        self.skills_set = set(skills_names)
        self.resource_unit_names = resource_unit_names
        self.resource_type_names = resource_type_names
        self.resource_skills = resource_skills
        self.task_ids = task_ids
        self.tasks_mode = tasks_mode
        # transform the "mode_details" dict that we largely used in DO in the good format.
        # In addition to the resource/duration split, skill requirements are
        # separated into task_skills_dict keyed on the known skill names.
        self.task_mode_dict = {}
        self.task_skills_dict = {}
        self.duration_dict = {}
        for task in self.tasks_mode:
            self.task_mode_dict[task] = {}
            self.task_skills_dict[task] = {}
            self.duration_dict[task] = {}
            for mode in self.tasks_mode[task]:
                self.task_mode_dict[task][mode] = ConstantModeConsumption({})
                self.task_skills_dict[task][mode] = {}
                for r in self.tasks_mode[task][mode]:
                    if r in self.resource_type_names:
                        self.task_mode_dict[task][mode].mode_details[r] = [
                            self.tasks_mode[task][mode][r]
                        ]
                    if r in self.skills_set:
                        self.task_skills_dict[task][mode][r] = self.tasks_mode[task][
                            mode
                        ][r]
                self.duration_dict[task][mode] = self.tasks_mode[task][mode]["duration"]
        self.successors = successors
        self.max_horizon = max_horizon
        self.resource_availability = resource_availability
        self.resource_renewable = resource_renewable
        self.initialize_domain()

    def _get_resource_units_names(self) -> List[str]:
        """Return the names (string) of all resource units as a list."""
        return self.resource_unit_names

    def _get_resource_types_names(self) -> List[str]:
        return self.resource_type_names

    def _get_resource_type_for_unit(self) -> Dict[str, str]:
        """Return a dictionary where the key is a resource unit name and the value a resource type name.
        An empty dictionary can be used if there are no resource unit matching a resource type."""
        # NOTE(review): returns None rather than the empty dict the docstring
        # suggests — confirm callers tolerate None.
        return None

    # NOTE(review): no leading underscore, unlike the other overrides here —
    # verify whether the base class expects _get_max_horizon instead.
    def get_max_horizon(self) -> int:
        return self.max_horizon

    def _get_tasks_modes(self) -> Dict[int, Dict[int, ModeConsumption]]:
        return self.task_mode_dict

    def _get_successors(self) -> Dict[int, List[int]]:
        return self.successors

    def _get_tasks_ids(self) -> Union[Set[int], Dict[int, Any], List[int]]:
        return self.task_ids

    def _get_task_duration(
        self, task: int, mode: Optional[int] = 1, progress_from: Optional[float] = 0.0
    ) -> int:
        return self.duration_dict[task][mode]

    def _get_original_quantity_resource(self, resource: str, **kwargs) -> int:
        return self.resource_availability[resource]

    def _get_resource_renewability(self) -> Dict[str, bool]:
        return self.resource_renewable

    def _get_all_resources_skills(self) -> Dict[str, Dict[str, Any]]:
        return self.resource_skills

    def _get_all_tasks_skills(self) -> Dict[int, Dict[int, Dict[str, Any]]]:
        return self.task_skills_dict

    def _get_objectives(self) -> List[int]:
        return [SchedulingObjectiveEnum.MAKESPAN]


class D(MultiModeMultiSkillRCPSPCalendar):
    pass


class MSRCPSPCalendar(D):
    """Multi-skill, multi-mode RCPSP with calendar resource availability."""

    def _get_max_horizon(self) -> int:
        return self.max_horizon

    def _get_quantity_resource(self, resource: str, time: int, **kwargs) -> int:
        return self.resource_availability[resource][time]

    def __init__(
        self,
        skills_names: List[str] = None,
        resource_unit_names: List[str] = None,
        resource_type_names: List[str] = None,
        resource_skills: Dict[str, Dict[str, Any]] = None,
        task_ids: List[int] = None,
        tasks_mode: Dict[int, Dict[int, Dict[str, int]]] = None,
        successors: Dict[int, List[int]] = None,
        max_horizon: int = None,
        resource_availability: Dict[str, List[int]] = None,
        resource_renewable: Dict[str, bool] = None,
    ):
        self.skills_set = set(skills_names)
        self.resource_unit_names = resource_unit_names
        self.resource_type_names = resource_type_names
        self.resource_skills = resource_skills
        self.task_ids = task_ids
        self.tasks_mode = tasks_mode
        # transform the "mode_details" dict that we largely used in DO in the good format.
        # Same resource/skill/duration split as MSRCPSP.__init__.
        self.task_mode_dict = {}
        self.task_skills_dict = {}
        self.duration_dict = {}
        for task in self.tasks_mode:
            self.task_mode_dict[task] = {}
            self.task_skills_dict[task] = {}
            self.duration_dict[task] = {}
            for mode in self.tasks_mode[task]:
                self.task_mode_dict[task][mode] = ConstantModeConsumption({})
                self.task_skills_dict[task][mode] = {}
                for r in self.tasks_mode[task][mode]:
                    if r in self.resource_type_names:
                        self.task_mode_dict[task][mode].mode_details[r] = [
                            self.tasks_mode[task][mode][r]
                        ]
                    if r in self.skills_set:
                        self.task_skills_dict[task][mode][r] = self.tasks_mode[task][
                            mode
                        ][r]
                self.duration_dict[task][mode] = self.tasks_mode[task][mode]["duration"]
        self.successors = successors
        self.max_horizon = max_horizon
        self.resource_availability = resource_availability
        self.resource_renewable = resource_renewable
        self.initialize_domain()

    def _get_resource_units_names(self) -> List[str]:
        """Return the names (string) of all resource units as a list."""
        return self.resource_unit_names

    def _get_resource_types_names(self) -> List[str]:
        return self.resource_type_names

    def _get_resource_type_for_unit(self) -> Dict[str, str]:
        """Return a dictionary where the key is a resource unit name and the value a resource type name.
        An empty dictionary can be used if there are no resource unit matching a resource type."""
        # NOTE(review): returns None rather than the empty dict the docstring
        # suggests — confirm callers tolerate None.
        return None

    # NOTE(review): no leading underscore (and _get_max_horizon also exists
    # above) — verify which one the framework actually dispatches to.
    def get_max_horizon(self) -> int:
        return self.max_horizon

    def _get_tasks_modes(self) -> Dict[int, Dict[int, ModeConsumption]]:
        return self.task_mode_dict

    def _get_successors(self) -> Dict[int, List[int]]:
        return self.successors

    def _get_tasks_ids(self) -> Union[Set[int], Dict[int, Any], List[int]]:
        return self.task_ids

    def _get_task_duration(
        self, task: int, mode: Optional[int] = 1, progress_from: Optional[float] = 0.0
    ) -> int:
        return self.duration_dict[task][mode]

    def _get_original_quantity_resource(self, resource: str, **kwargs) -> int:
        return self.resource_availability[resource]

    def _get_resource_renewability(self) -> Dict[str, bool]:
        return self.resource_renewable

    def _get_all_resources_skills(self) -> Dict[str, Dict[str, Any]]:
        return self.resource_skills

    def _get_all_tasks_skills(self) -> Dict[int, Dict[int, Dict[str, Any]]]:
        return self.task_skills_dict

    def _get_objectives(self) -> List[int]:
        return [SchedulingObjectiveEnum.MAKESPAN]


if __name__ == "__main__":
    # Smoke-test driver: load a domain and step through two actions, printing
    # the intermediate states.
    from skdecide.hub.domain.rcpsp.rcpsp_sk_parser import load_domain

    domain = load_domain()
    state = domain.get_initial_state()
    print("Initial state : ", state)
    actions = domain.get_applicable_actions(state)
    print([str(action) for action in actions.get_elements()])
    action = actions.get_elements()[0]
    new_state = domain.get_next_state(state, action)
    print("New state ", new_state)
    actions = domain.get_applicable_actions(new_state)
    print("New actions : ", [str(action) for action in actions.get_elements()])
    action = actions.get_elements()[0]
    print(action)
    new_state = domain.get_next_state(new_state, action)
    print("New state :", new_state)
    print("_is_terminal: ", domain._is_terminal(state))
37.920368
104
0.634036
2,970
24,762
5.011785
0.057912
0.041384
0.029694
0.033322
0.825865
0.810682
0.788176
0.783876
0.776621
0.776621
0
0.001987
0.268274
24,762
652
105
37.978528
0.819526
0.053792
0
0.76381
0
0
0.005176
0
0
0
0
0
0
1
0.148571
false
0.009524
0.011429
0.121905
0.318095
0.013333
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
8
2be7ee7dbce717ad4408fd9e6ad00a8e62ec5ebf
290
py
Python
irflow_client/__init__.py
Syncurity/irflow-sdk-python
26359afb93ac870dde2e13ee7b6ce675eb2b09d8
[ "Apache-2.0" ]
1
2018-10-23T05:31:07.000Z
2018-10-23T05:31:07.000Z
irflow_client/__init__.py
Syncurity/irflow-sdk-python
26359afb93ac870dde2e13ee7b6ce675eb2b09d8
[ "Apache-2.0" ]
9
2018-10-23T05:05:51.000Z
2020-03-24T16:43:07.000Z
irflow_client/__init__.py
Syncurity/irflow-sdk-python
26359afb93ac870dde2e13ee7b6ce675eb2b09d8
[ "Apache-2.0" ]
null
null
null
"""In order to make the IRFlowApi Class available globally we need the below input statement TODO Determine if we should call irflow_api.py irflow_client.py""" try: from irflow_client.irflow_client import IRFlowClient except ImportError: from irflow_client import IRFlowClient
36.25
92
0.8
42
290
5.404762
0.690476
0.211454
0.140969
0.264317
0
0
0
0
0
0
0
0
0.162069
290
7
93
41.428571
0.934156
0.527586
0
0
0
0
0
0
0
0
0
0.142857
0
1
0
true
0
0.75
0
0.75
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
1
0
1
0
1
0
0
7
92025d2ea7666a90c6ae512df6fa7ace446efbc9
3,655
py
Python
herders/migrations/0004_auto_20190516_1304.py
Itori/swarfarm
7192e2d8bca093b4254023bbec42b6a2b1887547
[ "Apache-2.0" ]
66
2017-09-11T04:46:00.000Z
2021-03-13T00:02:42.000Z
herders/migrations/0004_auto_20190516_1304.py
Itori/swarfarm
7192e2d8bca093b4254023bbec42b6a2b1887547
[ "Apache-2.0" ]
133
2017-09-24T21:28:59.000Z
2021-04-02T10:35:31.000Z
herders/migrations/0004_auto_20190516_1304.py
Itori/swarfarm
7192e2d8bca093b4254023bbec42b6a2b1887547
[ "Apache-2.0" ]
28
2017-08-30T19:04:32.000Z
2020-11-16T04:09:00.000Z
# Generated by Django 2.1.7 on 2019-05-16 20:04 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('herders', '0003_auto_20190213_1224'), ] operations = [ migrations.AddField( model_name='storage', name='dark_angelmon', field=models.IntegerField(default=0, help_text='Dark Angelmon'), ), migrations.AddField( model_name='storage', name='dark_king_angelmon', field=models.IntegerField(default=0, help_text='Dark King Angelmon'), ), migrations.AddField( model_name='storage', name='devilmon', field=models.IntegerField(default=0, help_text='Devilmon'), ), migrations.AddField( model_name='storage', name='fire_angelmon', field=models.IntegerField(default=0, help_text='Fire Angelmon'), ), migrations.AddField( model_name='storage', name='fire_king_angelmon', field=models.IntegerField(default=0, help_text='Fire King Angelmon'), ), migrations.AddField( model_name='storage', name='light_angelmon', field=models.IntegerField(default=0, help_text='Light Angelmon'), ), migrations.AddField( model_name='storage', name='light_king_angelmon', field=models.IntegerField(default=0, help_text='Light King Angelmon'), ), migrations.AddField( model_name='storage', name='rainbowmon_2_20', field=models.IntegerField(default=0, help_text='Rainbowmon 2⭐ lv.20'), ), migrations.AddField( model_name='storage', name='rainbowmon_3_1', field=models.IntegerField(default=0, help_text='Rainbowmon 3⭐ lv.1'), ), migrations.AddField( model_name='storage', name='rainbowmon_3_25', field=models.IntegerField(default=0, help_text='Rainbowmon 3⭐ lv.25'), ), migrations.AddField( model_name='storage', name='rainbowmon_4_1', field=models.IntegerField(default=0, help_text='Rainbowmon 4⭐ lv.1'), ), migrations.AddField( model_name='storage', name='rainbowmon_4_30', field=models.IntegerField(default=0, help_text='Rainbowmon 4⭐ lv.30'), ), migrations.AddField( model_name='storage', name='rainbowmon_5_1', field=models.IntegerField(default=0, help_text='Rainbowmon 5⭐ lv.1'), ), 
migrations.AddField( model_name='storage', name='super_angelmon', field=models.IntegerField(default=0, help_text='Super Angelmon'), ), migrations.AddField( model_name='storage', name='water_angelmon', field=models.IntegerField(default=0, help_text='Water Angelmon'), ), migrations.AddField( model_name='storage', name='water_king_angelmon', field=models.IntegerField(default=0, help_text='Water King Angelmon'), ), migrations.AddField( model_name='storage', name='wind_angelmon', field=models.IntegerField(default=0, help_text='Wind Angelmon'), ), migrations.AddField( model_name='storage', name='wind_king_angelmon', field=models.IntegerField(default=0, help_text='Wind King Angelmon'), ), ]
35.144231
82
0.573187
367
3,655
5.542234
0.144414
0.159292
0.20354
0.238938
0.89823
0.89823
0.89823
0.760079
0.46116
0.152409
0
0.031238
0.308071
3,655
103
83
35.485437
0.77066
0.012312
0
0.556701
1
0
0.198448
0.006375
0
0
0
0
0
1
0
false
0
0.010309
0
0.041237
0
0
0
0
null
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
a6056b9aae5ba2ae85e385d857f5daaee61a7baf
15,359
py
Python
play_various_walking.py
wpumacay/bipedal-robot-walking-simulation
9abcdf8ba7d07c3f32ffbeb0a65682f74970d217
[ "MIT" ]
103
2018-10-23T16:33:08.000Z
2022-01-19T03:42:18.000Z
play_various_walking.py
wpumacay/bipedal-robot-walking-simulation
9abcdf8ba7d07c3f32ffbeb0a65682f74970d217
[ "MIT" ]
null
null
null
play_various_walking.py
wpumacay/bipedal-robot-walking-simulation
9abcdf8ba7d07c3f32ffbeb0a65682f74970d217
[ "MIT" ]
26
2018-10-23T18:28:14.000Z
2021-10-30T15:35:28.000Z
""" bipedal robot walking simulation by Einsbon (Sunbin Kim) - GitHub: https://github.com/Einsbon - Youtube: https://www.youtube.com/channel/UCt7FZ-8uzV_jHJiKp3NlHvg - Blog: https://blog.naver.com/einsbon """ import pybullet as p import time from time import sleep import pybullet_data import numpy as np import math import os import motorController import walkGenerator # motor setting motor_kp = 0.5 motor_kd = 0.5 motor_torque = 1.5 motor_max_velocity = 5.0 # physics parameter setting fixedTimeStep = 1./1000 numSolverIterations = 200 physicsClient = p.connect(p.GUI) p.setTimeStep(fixedTimeStep) p.setPhysicsEngineParameter(numSolverIterations=numSolverIterations) p.setAdditionalSearchPath(pybullet_data.getDataPath()) # to load plane.urdf p.setGravity(0, 0, 0) p.resetDebugVisualizerCamera(cameraDistance=1, cameraYaw=10, cameraPitch=-5, cameraTargetPosition=[0.3, 0.5, 0.1]) # samurai.urdf plane.urdf planeId = p.loadSDF('stadium.sdf') robot = p.loadURDF(os.path.abspath(os.path.dirname(__file__))+'/humanoid_leg_12dof.7.urdf', [0, 0, 0.05], p.getQuaternionFromEuler([0, 0, 0]), useFixedBase=False) controller = motorController.MotorController( robot, physicsClient, fixedTimeStep, motor_kp, motor_kd, motor_torque, motor_max_velocity) walk = walkGenerator.WalkGenerator() walk.setWalkParameter(bodyMovePoint=8, legMovePoint=8, h=50, l=90, sit=40, swayBody=45, swayFoot=0, bodyPositionXPlus=5, swayShift=3, weightStart=0.4, weightEnd=0.6, stepTime=0.06, damping=0.0, incline=0.0) walk.generate() walk.inverseKinematicsAll() actionTime = walk._stepTime p.setGravity(0, 0, -9.8) p.setRealTimeSimulation(0) controller.setMotorsAngleInFixedTimestep(walk.walkPointStartRightInverse[0], 1, 0) waitTime = 1 repeatTime = int(waitTime/fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() # time.sleep(fixedTimeStep) p.setGravity(0, 0, -9.8) # walk 8 steps # start walking. 
right foot step for i in range(np.size(walk.walkPointStartRightInverse, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkPointStartRightInverse[i], actionTime, 0) for i in range(2): # left foot step for i in range(np.size(walk.walkPointLeftStepInverse, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkPointLeftStepInverse[i], actionTime, 0) # right foot step for i in range(np.size(walk.walkPointRightStepInverse, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkPointRightStepInverse[i], actionTime, 0) # end walking. left for i in range(np.size(walk.walkPointEndLeftInverse, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkPointEndLeftInverse[i], actionTime, 0) # rest 2 seconds waitTime = 2 repeatTime = int(waitTime/fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() ######################################################## p.resetBasePositionAndOrientation(robot, [0, 0, 0], p.getQuaternionFromEuler([0, 0, 0])) walk.setWalkParameter(bodyMovePoint=8, legMovePoint=8, h=50, l=90, sit=70, swayBody=45, swayFoot=0, bodyPositionXPlus=5, swayShift=3, weightStart=0.4, weightEnd=0.6, stepTime=0.06, damping=0.0, incline=0.0) walk.generate() walk.inverseKinematicsAll() actionTime = walk._stepTime controller.setMotorsAngleInFixedTimestep(walk.walkPointStartRightInverse[0], 2, 0) waitTime = 1 repeatTime = int(waitTime/fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() p.setGravity(0, 0, -9.8) for i in range(np.size(walk.walkPointStartRightInverse, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkPointStartRightInverse[i], actionTime, 0) for i in range(2): for i in range(np.size(walk.walkPointLeftStepInverse, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkPointLeftStepInverse[i], actionTime, 0) for i in range(np.size(walk.walkPointRightStepInverse, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkPointRightStepInverse[i], actionTime, 0) for i in range(np.size(walk.walkPointEndLeftInverse, 0)): 
controller.setMotorsAngleInFixedTimestep(walk.walkPointEndLeftInverse[i], actionTime, 0) waitTime = 2 repeatTime = int(waitTime/fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() ######################################################## p.resetBasePositionAndOrientation(robot, [0, 0, 0], p.getQuaternionFromEuler([0, 0, 0])) walk.setWalkParameter(bodyMovePoint=8, legMovePoint=8, h=20, l=40, sit=40, swayBody=30, swayFoot=0, bodyPositionXPlus=5, swayShift=3, weightStart=0.4, weightEnd=0.6, stepTime=0.03, damping=0.0, incline=0.0) walk.generate() walk.inverseKinematicsAll() actionTime = walk._stepTime controller.setMotorsAngleInFixedTimestep(walk.walkPointStartRightInverse[0], 2, 0) waitTime = 1 repeatTime = int(waitTime/fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() for i in range(np.size(walk.walkPointStartRightInverse, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkPointStartRightInverse[i], actionTime, 0) for i in range(2): for i in range(np.size(walk.walkPointLeftStepInverse, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkPointLeftStepInverse[i], actionTime, 0) for i in range(np.size(walk.walkPointRightStepInverse, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkPointRightStepInverse[i], actionTime, 0) for i in range(np.size(walk.walkPointEndLeftInverse, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkPointEndLeftInverse[i], actionTime, 0) waitTime = 2 repeatTime = int(waitTime/fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() ######################################################## p.resetBasePositionAndOrientation(robot, [0, 0, 0], p.getQuaternionFromEuler([0, 0, 0])) walk.setWalkParameter(bodyMovePoint=8, legMovePoint=8, h=50, l=140, sit=40, swayBody=50, swayFoot=0, bodyPositionXPlus=-2, swayShift=3, weightStart=0.4, weightEnd=0.6, stepTime=0.12, damping=0.0, incline=0.0) walk.generate() walk.inverseKinematicsAll() actionTime = walk._stepTime 
controller.setMotorsAngleInFixedTimestep(walk.walkPointStartRightInverse[0], 2, 0) waitTime = 1 repeatTime = int(waitTime/fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() for i in range(np.size(walk.walkPointStartRightInverse, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkPointStartRightInverse[i], actionTime, 0) for i in range(2): for i in range(np.size(walk.walkPointLeftStepInverse, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkPointLeftStepInverse[i], actionTime, 0) for i in range(np.size(walk.walkPointRightStepInverse, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkPointRightStepInverse[i], actionTime, 0) for i in range(np.size(walk.walkPointEndLeftInverse, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkPointEndLeftInverse[i], actionTime, 0) waitTime = 2 repeatTime = int(waitTime/fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() ######################################################## p.resetBasePositionAndOrientation(robot, [0, 0, 0], p.getQuaternionFromEuler([0, 0, 0])) walk.setWalkParameter(bodyMovePoint=8, legMovePoint=8, h=40, l=70, sit=40, swayBody=45, swayFoot=0, bodyPositionXPlus=-40, swayShift=3, weightStart=0.4, weightEnd=0.6, stepTime=0.06, damping=0.0, incline=0.0) walk.generate() walk.inverseKinematicsAll() actionTime = walk._stepTime controller.setMotorsAngleInFixedTimestep(walk.walkPointStartRightInverse[0], 2, 0) waitTime = 1 repeatTime = int(waitTime/fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() for i in range(np.size(walk.walkPointStartRightInverse, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkPointStartRightInverse[i], actionTime, 0) for i in range(2): for i in range(np.size(walk.walkPointLeftStepInverse, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkPointLeftStepInverse[i], actionTime, 0) for i in range(np.size(walk.walkPointRightStepInverse, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkPointRightStepInverse[i], actionTime, 0) for i in 
range(np.size(walk.walkPointEndLeftInverse, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkPointEndLeftInverse[i], actionTime, 0) waitTime = 2 repeatTime = int(waitTime/fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() ######################################################## p.resetBasePositionAndOrientation(robot, [0, 0, 0.], p.getQuaternionFromEuler([0, 0, 0])) walk.setWalkParameter(bodyMovePoint=8, legMovePoint=8, h=50, l=-90, sit=40, swayBody=45, swayFoot=0, bodyPositionXPlus=0, swayShift=3, weightStart=0.4, weightEnd=0.6, stepTime=0.06, damping=0.0, incline=0.0) walk.generate() walk.inverseKinematicsAll() actionTime = walk._stepTime controller.setMotorsAngleInFixedTimestep(walk.walkPointStartRightInverse[0], 2, 0) waitTime = 1 repeatTime = int(waitTime/fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() for i in range(np.size(walk.walkPointStartRightInverse, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkPointStartRightInverse[i], actionTime, 0) for i in range(2): # repeat twice # left foot step for i in range(np.size(walk.walkPointLeftStepInverse, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkPointLeftStepInverse[i], actionTime, 0) for i in range(np.size(walk.walkPointRightStepInverse, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkPointRightStepInverse[i], actionTime, 0) for i in range(np.size(walk.walkPointEndLeftInverse, 0)): controller.setMotorsAngleInFixedTimestep(walk.walkPointEndLeftInverse[i], actionTime, 0) waitTime = 2 repeatTime = int(waitTime/fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() ######################################################## p.resetBasePositionAndOrientation(robot, [0, 0, 0], p.getQuaternionFromEuler([0, 0, 0])) walk.setWalkParameter(bodyMovePoint=8, legMovePoint=8, h=50, l=0, sit=40, swayBody=45, swayFoot=0, bodyPositionXPlus=5, swayShift=3, weightStart=0.4, weightEnd=0.6, stepTime=0.06, damping=0.0, incline=0.0) walk.generate() walk.inverseKinematicsAll() 
actionTime = walk._stepTime controller.setMotorsAngleInFixedTimestep(walk.walkPointStartRightInverse[0], 2, 0) waitTime = 1 repeatTime = int(waitTime/fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() # Turn function is not accurate yet. for i in range(np.size(walk.walkPointStartRightInverse, 0)): controller.setMotorsAngleInFixedTimestep( walk.walkPointStartRightInverse[i]+walk.turnListUnfold[i]*0.3, actionTime, 0) for i in range(3): for i in range(np.size(walk.walkPointLeftStepInverse, 0)): controller.setMotorsAngleInFixedTimestep( walk.walkPointLeftStepInverse[i]+walk.turnListFold[i]*0.3, actionTime, 0) for i in range(np.size(walk.walkPointRightStepInverse, 0)): controller.setMotorsAngleInFixedTimestep( walk.walkPointRightStepInverse[i]+walk.turnListUnfold[i]*0.3, actionTime, 0) for i in range(np.size(walk.walkPointEndLeftInverse, 0)): controller.setMotorsAngleInFixedTimestep( walk.walkPointEndLeftInverse[i]+walk.turnListFold[i]*0.3, actionTime, 0) waitTime = 2 repeatTime = int(waitTime/fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() ######################################################## p.resetBasePositionAndOrientation(robot, [0, 0, 0.0], p.getQuaternionFromEuler([0, 0, 0])) walk.setWalkParameter(bodyMovePoint=8, legMovePoint=8, h=50, l=90, sit=40, swayBody=35, swayFoot=0, bodyPositionXPlus=5, swayShift=3, weightStart=0.4, weightEnd=0.6, stepTime=0.06, damping=0.0, incline=0.0) walk.generate() walk.inverseKinematicsAll() actionTime = walk._stepTime controller.setMotorsAngleInFixedTimestep(walk.walkPointStartRightInverse[0], 2, 0) waitTime = 1 repeatTime = int(waitTime/fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() for i in range(np.size(walk.walkPointStartRightInverse, 0)): controller.setMotorsAngleInFixedTimestep( walk.walkPointStartRightInverse[i]+walk.turnListUnfold[i]*0.3, actionTime, 0) for i in range(3): for i in range(np.size(walk.walkPointLeftStepInverse, 0)): controller.setMotorsAngleInFixedTimestep( 
walk.walkPointLeftStepInverse[i]+walk.turnListFold[i]*0.3, actionTime, 0) for i in range(np.size(walk.walkPointRightStepInverse, 0)): controller.setMotorsAngleInFixedTimestep( walk.walkPointRightStepInverse[i]+walk.turnListUnfold[i]*0.3, actionTime, 0) for i in range(np.size(walk.walkPointEndLeftInverse, 0)): controller.setMotorsAngleInFixedTimestep( walk.walkPointEndLeftInverse[i]+walk.turnListFold[i]*0.3, actionTime, 0) ######################################################## fixedTimeStep = 1/500 p.setTimeStep(fixedTimeStep) giantRobot = p.loadURDF(os.path.abspath(os.path.dirname(__file__))+'/humanoid_leg_12dof.7.urdf', [-4.9, -0.2, 0.01], p.getQuaternionFromEuler([0, 0, 0]), useFixedBase=False, globalScaling=10) waitTime = 2 repeatTime = int(waitTime/fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() for i in range(p.getNumJoints(giantRobot)): p.changeDynamics(giantRobot, i, lateralFriction=5) controller2 = motorController.MotorController( giantRobot, physicsClient, fixedTimeStep, motor_kp, motor_kd, 50, motor_max_velocity) walk.setWalkParameter(bodyMovePoint=8, legMovePoint=8, h=50, l=90, sit=30, swayBody=35, swayFoot=0, bodyPositionXPlus=5, swayShift=3, weightStart=0.4, weightEnd=0.6, stepTime=0.08, damping=0.0, incline=0.0) walk.generate() walk.inverseKinematicsAll() actionTime = walk._stepTime controller2.setMotorsAngleInFixedTimestep(walk.walkPointStartRightInverse[0], 4, 0) waitTime = 1 repeatTime = int(waitTime/fixedTimeStep) for _ in range(repeatTime): p.stepSimulation() for i in range(np.size(walk.walkPointStartRightInverse, 0)): controller2.setMotorsAngleInFixedTimestep(walk.walkPointStartRightInverse[i], actionTime, 0) for i in range(2): for i in range(np.size(walk.walkPointLeftStepInverse, 0)): controller2.setMotorsAngleInFixedTimestep(walk.walkPointLeftStepInverse[i], actionTime, 0) for i in range(np.size(walk.walkPointRightStepInverse, 0)): controller2.setMotorsAngleInFixedTimestep(walk.walkPointRightStepInverse[i], actionTime, 0) for i 
in range(np.size(walk.walkPointEndLeftInverse, 0)): controller2.setMotorsAngleInFixedTimestep(walk.walkPointEndLeftInverse[i], actionTime, 0) controller2.setMotorsAngleInFixedTimestep(walk.inverseKinematicsPoint([-10, -50, 30], [-10, -50, 30]), 1, 0.5) controller2.setMotorsAngleInFixedTimestep(walk.inverseKinematicsPoint([-80, -50, 50], [-10, -50, 30]), 1.5, 0.5) controller2.setMotorsAngleInFixedTimestep(walk.inverseKinematicsPoint([-10, -50, 40], [0, -50, 30]), 0.4, 0) controller2.setMotorsAngleInFixedTimestep(walk.inverseKinematicsPoint([60, -40, 40], [0, -40, 30]), 0.2, 3) controller2.setMotorsAngleInFixedTimestep(walk.inverseKinematicsPoint([0, -40, 45], [0, -40, 30]), 1, 0) controller2.setMotorsAngleInFixedTimestep(walk.inverseKinematicsPoint([0, 0, 10], [0, 0, 10]), 0.5, 50)
42.78273
130
0.736506
1,754
15,359
6.417902
0.096921
0.039176
0.024518
0.04495
0.870125
0.845874
0.810163
0.798881
0.798881
0.798881
0
0.042503
0.117651
15,359
358
131
42.902235
0.788149
0.031512
0
0.764259
0
0
0.004373
0.00361
0
0
0
0
0
1
0
false
0
0.034221
0
0.034221
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
a6a61b583837ac5ade8a8de8492d2c75541e8a98
2,970
py
Python
.spyder-py3/history.py
evandadure/polychess_henaff
92bb14973f9ff7919cafe4d3a323688c6dea5b7d
[ "MIT" ]
null
null
null
.spyder-py3/history.py
evandadure/polychess_henaff
92bb14973f9ff7919cafe4d3a323688c6dea5b7d
[ "MIT" ]
null
null
null
.spyder-py3/history.py
evandadure/polychess_henaff
92bb14973f9ff7919cafe4d3a323688c6dea5b7d
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # *** Spyder Python Console History Log *** pip install python-chess install python-chess runfile('E:/Downloads/polychess-master/polychess-master/polychess.py', wdir='E:/Downloads/polychess-master/polychess-master') board.legal_moves list(board.legal_moves) list(board.legal_moves)[0] print(list(board.legal_moves)[0]) board.turn reader.find_all(board) board list(board.legal_moves) list(board.legal_moves)[0] board.push(chess.Move.from_uci(chess.Move.uci(list(board.legal_moves)[0]))) board board.pop() board board.push(chess.Move.from_uci(chess.Move.uci(list(board.legal_moves)[0]))) board board.pop() board board.push(chess.Move.from_uci(chess.Move.uci(list(board.legal_moves)[0]))) board board.pop() board board.push(chess.Move.from_uci(chess.Move.uci(list(board.legal_moves)[0]))) board board.pop() board runfile('E:/Cours/ProjChess/polychess_henaff/minMax.py', wdir='E:/Cours/ProjChess/polychess_henaff') %clear runfile('E:/Cours/ProjChess/polychess_henaff/minMax.py', wdir='E:/Cours/ProjChess/polychess_henaff') board board2 getBoardEval(board2.FEN()) runfile('E:/Cours/ProjChess/polychess_henaff/evaluation.py', wdir='E:/Cours/ProjChess/polychess_henaff') getBoardEval(board2.FEN()) getBoardEval(board2.fen()) minMax(board2) minMax(board2, 1) runfile('E:/Cours/ProjChess/polychess_henaff/minMax.py', wdir='E:/Cours/ProjChess/polychess_henaff') minMax(board2, 1) minMax(board2, 3) minMax(board2, 1) minMax(board2, 2) runfile('E:/Cours/ProjChess/polychess_henaff/minMax.py', wdir='E:/Cours/ProjChess/polychess_henaff') %clear runfile('E:/Cours/ProjChess/polychess_henaff/minMax.py', wdir='E:/Cours/ProjChess/polychess_henaff') board runfile('E:/Cours/ProjChess/polychess_henaff/minMax.py', wdir='E:/Cours/ProjChess/polychess_henaff') debugfile('E:/Cours/ProjChess/polychess_henaff/minMax.py', wdir='E:/Cours/ProjChess/polychess_henaff') runfile('E:/Cours/ProjChess/polychess_henaff/minMax.py', wdir='E:/Cours/ProjChess/polychess_henaff') board2.legal_moves 
board2.legal_moves() print(board2.legal_moves()) list(board.legal_moves) list(board2.legal_moves) runfile('E:/Cours/ProjChess/polychess_henaff/minMax.py', wdir='E:/Cours/ProjChess/polychess_henaff') board2 runfile('E:/Cours/ProjChess/polychess_henaff/minMax.py', wdir='E:/Cours/ProjChess/polychess_henaff') board2 runfile('E:/Cours/ProjChess/polychess_henaff/minMax.py', wdir='E:/Cours/ProjChess/polychess_henaff') board1 runfile('E:/Cours/ProjChess/polychess_henaff/minMax.py', wdir='E:/Cours/ProjChess/polychess_henaff') %clear runfile('E:/Cours/ProjChess/polychess_henaff/polychess.py', wdir='E:/Cours/ProjChess/polychess_henaff') %clear runfile('E:/Cours/ProjChess/polychess_henaff/polychess.py', wdir='E:/Cours/ProjChess/polychess_henaff') chess.pgn.Game.from_board(board) runfile('E:/Cours/ProjChess/polychess_henaff/polychess.py', wdir='E:/Cours/ProjChess/polychess_henaff') runfile('E:/Cours/ProjChess/polychess_henaff/lichess.py', wdir='E:/Cours/ProjChess/polychess_henaff')
41.25
125
0.793266
435
2,970
5.289655
0.114943
0.088657
0.221643
0.354628
0.854846
0.835724
0.771404
0.71708
0.71708
0.683181
0
0.010482
0.036364
2,970
72
126
41.25
0.793501
0.021212
0
0.714286
0
0
0.509122
0.509122
0
0
0
0
0
0
null
null
0
0
null
null
0.028571
0
0
0
null
0
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
null
0
0
0
0
1
0
0
0
0
0
0
0
0
11
a6fafe14a2953a024b22608704e01da375618561
7,435
py
Python
cifra/tests/test_console_parser.py
dante-signal31/cifra
3914822ec49cd238fe31fd58424fa2b058116b9a
[ "BSD-3-Clause" ]
null
null
null
cifra/tests/test_console_parser.py
dante-signal31/cifra
3914822ec49cd238fe31fd58424fa2b058116b9a
[ "BSD-3-Clause" ]
null
null
null
cifra/tests/test_console_parser.py
dante-signal31/cifra
3914822ec49cd238fe31fd58424fa2b058116b9a
[ "BSD-3-Clause" ]
null
null
null
""" Tests for argument parsing at launcher. """ import tempfile import pytest import cifra.cifra_launcher as cifra_launcher from typing import Dict def _assert_dict_key(key: str, value: str, _dict: Dict[str, str]): assert key in _dict assert _dict[key] == value @pytest.mark.quick_test def test_launcher_create_dictionary(): provided_args = "dictionary create klingon".split() parsed_arguments: Dict[str, str] = cifra_launcher.parse_arguments(provided_args) def _assert(key, value): return _assert_dict_key(key, value, parsed_arguments) _assert("mode", "dictionary") _assert("action", "create") _assert("dictionary_name", "klingon") @pytest.mark.quick_test def test_launcher_create_dictionary_with_initial_file(): with tempfile.NamedTemporaryFile() as output_file: provided_args = f"dictionary create klingon --initial_words_file {output_file.name}".split() parsed_arguments: Dict[str, str] = cifra_launcher.parse_arguments(provided_args) def _assert(key, value): return _assert_dict_key(key, value, parsed_arguments) _assert("mode", "dictionary") _assert("action", "create") _assert("dictionary_name", "klingon") _assert("initial_words_file", f"{output_file.name}") @pytest.mark.quick_test def test_launcher_create_dictionary_with_not_existing_initial_file(): with pytest.raises(BaseException): provided_args = "dictionary create klingon --initial_words_file klingon_novel.txt".split() _: Dict[str, str] = cifra_launcher.parse_arguments(provided_args) @pytest.mark.quick_test def test_launcher_delete_dictionary(): provided_args = "dictionary delete klingon".split() parsed_arguments: Dict[str, str] = cifra_launcher.parse_arguments(provided_args) def _assert(key, value): return _assert_dict_key(key, value, parsed_arguments) _assert("mode", "dictionary") _assert("action", "delete") _assert("dictionary_name", "klingon") @pytest.mark.quick_test def test_launcher_update_dictionary(): with tempfile.NamedTemporaryFile() as words_file: provided_args = f"dictionary update klingon 
{words_file.name}".split() parsed_arguments: Dict[str, str] = cifra_launcher.parse_arguments(provided_args) def _assert(key, value): return _assert_dict_key(key, value, parsed_arguments) _assert("mode", "dictionary") _assert("action", "update") _assert("dictionary_name", "klingon") _assert("words_file", f"{words_file.name}") @pytest.mark.quick_test def test_launcher_cipher_caesar(): with tempfile.NamedTemporaryFile() as message_file: provided_args = f"cipher caesar 3 {message_file.name}".split() parsed_arguments: Dict[str, str] = cifra_launcher.parse_arguments(provided_args) def _assert(key, value): return _assert_dict_key(key, value, parsed_arguments) _assert("mode", "cipher") _assert("algorithm", "caesar") _assert("key", "3") _assert("file_to_cipher", f"{message_file.name}") assert "ciphered_file" not in parsed_arguments.keys() @pytest.mark.quick_test def test_launcher_cipher_caesar_with_output_file(): with tempfile.NamedTemporaryFile() as message_file: provided_args = f"cipher caesar 3 {message_file.name} --ciphered_file ciphered_message.txt".split() parsed_arguments: Dict[str, str] = cifra_launcher.parse_arguments(provided_args) def _assert(key, value): return _assert_dict_key(key, value, parsed_arguments) _assert("mode", "cipher") _assert("algorithm", "caesar") _assert("key", "3") _assert("file_to_cipher", f"{message_file.name}") _assert("ciphered_file", "ciphered_message.txt") @pytest.mark.quick_test def test_launcher_incorrect_cipher_algorithm(): with tempfile.NamedTemporaryFile() as message_file: provided_args = f"cipher augustus 3 {message_file.name} --ciphered_file ciphered_message.txt".split() with pytest.raises(BaseException): _: Dict[str, str] = cifra_launcher.parse_arguments(provided_args) @pytest.mark.quick_test def test_launcher_decipher_caesar(): with tempfile.NamedTemporaryFile() as message_file: provided_args = f"decipher caesar 3 {message_file.name}".split() parsed_arguments: Dict[str, str] = cifra_launcher.parse_arguments(provided_args) def 
_assert(key, value): return _assert_dict_key(key, value, parsed_arguments) _assert("mode", "decipher") _assert("algorithm", "caesar") _assert("key", "3") _assert("file_to_decipher", f"{message_file.name}") assert "deciphered_file" not in parsed_arguments.keys() @pytest.mark.quick_test def test_launcher_decipher_caesar_with_output_file(): with tempfile.NamedTemporaryFile() as message_file: provided_args = f"decipher caesar 3 {message_file.name} --deciphered_file deciphered_message.txt".split() parsed_arguments: Dict[str, str] = cifra_launcher.parse_arguments(provided_args) def _assert(key, value): return _assert_dict_key(key, value, parsed_arguments) _assert("mode", "decipher") _assert("algorithm", "caesar") _assert("key", "3") _assert("file_to_decipher", f"{message_file.name}") _assert("deciphered_file", "deciphered_message.txt") @pytest.mark.quick_test def test_launcher_incorrect_decipher_algorithm(): with tempfile.NamedTemporaryFile() as message_file: provided_args = f"decipher augustus 3 {message_file.name} --deciphered_file deciphered_message.txt".split() with pytest.raises(BaseException): _: Dict[str, str] = cifra_launcher.parse_arguments(provided_args) @pytest.mark.quick_test def test_launcher_attack_caesar(): with tempfile.NamedTemporaryFile() as message_file: provided_args = f"attack caesar {message_file.name} --deciphered_file recovered_message.txt".split() parsed_arguments: Dict[str, str] = cifra_launcher.parse_arguments(provided_args) def _assert(key, value): return _assert_dict_key(key, value, parsed_arguments) _assert("mode", "attack") _assert("algorithm", "caesar") _assert("file_to_attack", f"{message_file.name}") _assert("deciphered_file", "recovered_message.txt") assert "charset" not in parsed_arguments.keys() @pytest.mark.quick_test def test_launcher_attack_caesar_with_charset(): with tempfile.NamedTemporaryFile() as message_file: provided_args = f"attack caesar {message_file.name} --deciphered_file recovered_message.txt " \ "--charset 
abcdefghijklmnñopqrstuvwxyz".split() parsed_arguments: Dict[str, str] = cifra_launcher.parse_arguments(provided_args) def _assert(key, value): return _assert_dict_key(key, value, parsed_arguments) _assert("mode", "attack") _assert("algorithm", "caesar") _assert("file_to_attack", f"{message_file.name}") _assert("deciphered_file", "recovered_message.txt") _assert("charset", "abcdefghijklmnñopqrstuvwxyz") @pytest.mark.quick_test def test_launcher_list_dictionaries(): provided_args = "dictionary list".split() parsed_arguments: Dict[str, str] = cifra_launcher.parse_arguments(provided_args) def _assert(key, value): return _assert_dict_key(key, value, parsed_arguments) _assert("mode", "dictionary") _assert("action", "list")
43.735294
115
0.723336
906
7,435
5.575055
0.076159
0.066521
0.029697
0.052663
0.885567
0.847357
0.831914
0.825183
0.825183
0.812116
0
0.001607
0.163013
7,435
169
116
43.994083
0.810059
0.005245
0
0.634328
0
0
0.224553
0.027612
0
0
0
0
0.447761
1
0.19403
false
0
0.029851
0.08209
0.223881
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
7
47267c5df30d85a7dcabcf5f248f2f8f06c5d54d
3,445
py
Python
models/classifier_models.py
ruanyyyyyyy/text2shape
276379df22ffdbe4cb54f30a88d65cf87bf53243
[ "Apache-2.0" ]
72
2018-03-27T13:45:09.000Z
2022-03-16T03:04:02.000Z
models/classifier_models.py
ruanyyyyyyy/text2shape
276379df22ffdbe4cb54f30a88d65cf87bf53243
[ "Apache-2.0" ]
9
2018-05-30T04:05:50.000Z
2020-12-02T01:19:01.000Z
models/classifier_models.py
ruanyyyyyyy/text2shape
276379df22ffdbe4cb54f30a88d65cf87bf53243
[ "Apache-2.0" ]
23
2018-03-26T21:35:09.000Z
2022-02-24T17:39:58.000Z
from lib.classifier import Classifier import lib.layers as layers import tensorflow as tf class Classifier1(Classifier): def __init__(self, inputs_dict, is_training, reuse=False, name='classifier_1'): super(Classifier1, self).__init__(inputs_dict, is_training, reuse=reuse, name=name) def build_architecture(self): num_classes = self.num_classes x = self.placeholders['shape_batch'] x = layers.conv3d(x, 64, 3, strides=2, padding='same', name='conv1', reuse=self.reuse) x = tf.layers.batch_normalization(x, training=self.is_training, name='conv1_batch_norm', reuse=self.reuse) x = layers.relu(x, name='conv1_relu') x = layers.conv3d(x, 128, 3, strides=2, padding='same', name='conv2', reuse=self.reuse) x = tf.layers.batch_normalization(x, training=self.is_training, name='conv2_batch_norm', reuse=self.reuse) x = layers.relu(x, name='conv2_relu') x = layers.conv3d(x, 256, 3, strides=2, padding='same', name='conv3', reuse=self.reuse) x = tf.layers.batch_normalization(x, training=self.is_training, name='conv3_batch_norm', reuse=self.reuse) x = layers.relu(x, name='conv3_relu') x = layers.avg_pooling3d(x, name='avg_pool4') encoder_output = x x = layers.dense(x, num_classes, name='fc5', reuse=self.reuse) prob = layers.softmax(x, name='softmax_layer') output_dict = { 'logits': x, 'probabilities': prob, 'encoder_output': encoder_output, } return output_dict class Classifier128(Classifier): """Classifier with 128 dim embeddings. 
""" def __init__(self, inputs_dict, is_training, reuse=False, name='classifier_128'): super(Classifier128, self).__init__(inputs_dict, is_training, reuse=reuse, name=name) def build_architecture(self): x = self.placeholders['shape_batch'] num_classes = self.num_classes # Chair/table classification x = layers.conv3d(x, 64, 3, strides=2, padding='same', name='conv1', reuse=self.reuse) x = tf.layers.batch_normalization(x, training=self.is_training, name='conv1_batch_norm', reuse=self.reuse) x = layers.relu(x, name='conv1_relu') x = layers.conv3d(x, 128, 3, strides=2, padding='same', name='conv2', reuse=self.reuse) x = tf.layers.batch_normalization(x, training=self.is_training, name='conv2_batch_norm', reuse=self.reuse) x = layers.relu(x, name='conv2_relu') x = layers.conv3d(x, 256, 3, strides=2, padding='same', name='conv3', reuse=self.reuse) x = tf.layers.batch_normalization(x, training=self.is_training, name='conv3_batch_norm', reuse=self.reuse) x = layers.relu(x, name='conv3_relu') x = layers.avg_pooling3d(x, name='avg_pool4') x = layers.dense(x, 128, name='fc5', reuse=self.reuse) encoder_output = x x = layers.dense(x, num_classes, name='fc6', reuse=self.reuse) prob = layers.softmax(x, name='softmax_layer') output_dict = { 'logits': x, 'probabilities': prob, 'encoder_output': encoder_output, } return output_dict
44.166667
96
0.607547
432
3,445
4.655093
0.152778
0.059175
0.104426
0.089508
0.880159
0.817504
0.817504
0.817504
0.817504
0.817504
0
0.030459
0.266183
3,445
77
97
44.74026
0.765032
0.019739
0
0.766667
0
0
0.111836
0
0
0
0
0
0
1
0.066667
false
0
0.05
0
0.183333
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
5b643aec8452d89a30c0884bc04c994b9a13769d
5,240
py
Python
datacenter/migrations/0001_initial.py
MZen2610/electronic-diary
be78854f3edf06aa348c61b3592715a86fc6dcbc
[ "MIT" ]
1
2021-01-20T17:39:03.000Z
2021-01-20T17:39:03.000Z
datacenter/migrations/0001_initial.py
MZen2610/electronic-diary
be78854f3edf06aa348c61b3592715a86fc6dcbc
[ "MIT" ]
null
null
null
datacenter/migrations/0001_initial.py
MZen2610/electronic-diary
be78854f3edf06aa348c61b3592715a86fc6dcbc
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # Generated by Django 1.11.2 on 2019-06-26 17:28 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Commendation', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('text', models.TextField()), ('date', models.DateField()), ], ), migrations.CreateModel( name='Lesson', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=200)), ('year_of_study', models.IntegerField()), ('year_of_study_group', models.CharField(max_length=1)), ('timeslot', models.IntegerField()), ('room', models.CharField(max_length=50)), ('date', models.DateField()), ], ), migrations.CreateModel( name='Mark', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('points', models.IntegerField()), ('teacher_note', models.TextField()), ('date', models.DateField()), ], ), migrations.CreateModel( name='Schoolkid', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('full_name', models.CharField(max_length=200)), ('birthday', models.DateField(null=True)), ('year_started_education', models.IntegerField(null=True)), ('year_of_study', models.IntegerField(null=True)), ('year_of_study_group', models.CharField(max_length=1, null=True)), ], ), migrations.CreateModel( name='Subject', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=200)), ('year_of_study', models.IntegerField(null=True)), ], ), migrations.CreateModel( name='Teacher', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('full_name', 
models.CharField(max_length=200)), ('birthday', models.DateField(blank=True, null=True)), ], ), migrations.CreateModel( name='Сhastisement', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('text', models.TextField()), ('date', models.DateField()), ('schoolkid', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='datacenter.Schoolkid')), ('subject', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='datacenter.Subject')), ('teacher', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='datacenter.Teacher')), ], ), migrations.AddField( model_name='mark', name='schoolkid', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='datacenter.Schoolkid'), ), migrations.AddField( model_name='mark', name='subject', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='datacenter.Subject'), ), migrations.AddField( model_name='mark', name='teacher', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='datacenter.Teacher'), ), migrations.AddField( model_name='lesson', name='subject', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='datacenter.Subject'), ), migrations.AddField( model_name='lesson', name='teacher', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='datacenter.Teacher'), ), migrations.AddField( model_name='commendation', name='schoolkid', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='datacenter.Schoolkid'), ), migrations.AddField( model_name='commendation', name='subject', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='datacenter.Subject'), ), migrations.AddField( model_name='commendation', name='teacher', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='datacenter.Teacher'), ), ]
41.92
128
0.567748
495
5,240
5.870707
0.165657
0.035788
0.057811
0.090847
0.849621
0.813489
0.774948
0.750516
0.72161
0.693393
0
0.008897
0.292176
5,240
124
129
42.258065
0.774602
0.012977
0
0.732759
1
0
0.121687
0.004256
0
0
0
0
0
1
0
false
0
0.025862
0
0.060345
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
5b8acd871da63e0d7767d6397d760e0ecf59d11c
6,141
py
Python
prepare_data/dataset_Celeb.py
guxiwuruo/NAN
9d711d17cbd8ad2c3e85c10d9818688879c899b3
[ "MIT" ]
16
2019-05-09T15:04:29.000Z
2021-02-14T09:43:12.000Z
prepare_data/dataset_Celeb.py
guxiwuruo/NAN
9d711d17cbd8ad2c3e85c10d9818688879c899b3
[ "MIT" ]
6
2019-06-04T01:44:22.000Z
2021-11-02T06:50:42.000Z
prepare_data/dataset_Celeb.py
guxiwuruo/NAN
9d711d17cbd8ad2c3e85c10d9818688879c899b3
[ "MIT" ]
5
2019-06-11T08:56:16.000Z
2021-02-01T03:41:37.000Z
import numpy as np import scipy.io as sio import os import pickle def save(lst, path): with open(path, 'wb') as fp: pickle.dump(lst, fp) def fill_str(a): nstr = str(a) for i in range(4 - len(nstr)): nstr = '0' + nstr return nstr def get_close_set(): num_sub = 1000 data_dir = '/home/maoyirong/data/Celebrity-1000/resnet50_128_feat' save_dir = './data/Celeb/close' data_path = './data/Celeb/close/sub_video_seq_{}'.format(num_sub) lst_seq = sio.loadmat(data_path) gallery = lst_seq['gallery'] probe = lst_seq['probe'] print('gallery and probe size {} {}'.format(gallery.shape[0], probe.shape[0])) subs = [] sub_faces = [] for i in range(gallery.shape[0]): sub = fill_str(gallery[i, 0]) video = fill_str(gallery[i, 1]) seq = str(gallery[i, 2]) sub_exist = True if subs.count(sub) == 0: subs.append(sub) sub_faces.append([]) sub_exist = False idx = subs.index(sub) mat_path = os.path.join(data_dir, sub, video, seq + '_h.mat') if os.path.exists(mat_path): arr = sio.loadmat(mat_path) arr = arr['feat'] # arr_h = sio.loadmat(os.path.join(data_dir, sub, video, seq + '.mat')) # arr = np.vstack([arr, arr_h['feat']]) if len(arr) == 0: print(mat_path) continue if len(sub_faces[idx]) == 0: sub_faces[idx] = arr else: sub_faces[idx] = np.vstack([sub_faces[idx], arr]) lst_subs = [] lst_sub_faces = [] min_len = 100000 for i in range(len(subs)): if len(sub_faces[i]) > 0: lst_sub_faces.append(sub_faces[i]) lst_subs.append(subs[i]) if np.size(sub_faces[i], 0) < min_len: min_len = np.size(sub_faces[i], 0) print('min gallery sub faces {}'.format(min_len)) save(lst_sub_faces, os.path.join(save_dir, 'train_{}.bin'.format(num_sub))) ## probe min_len = 100000 lst_probe_video = [] lst_video_no = [] K = 30 for i in range(probe.shape[0]): sub = fill_str(probe[i, 0]) video = fill_str(probe[i, 1]) # seq = str(probe[i, 2]) if lst_subs.count(sub) == 0: continue idx = lst_subs.index(sub) if lst_video_no.count(sub + '_' + video) == 0: lst_mat = [file for file in os.listdir(os.path.join(data_dir, sub, video)) if 
file.endswith('_h.mat')] arr = [] for mat in lst_mat: mat_path = os.path.join(data_dir, sub, video, mat) if os.path.exists(mat_path): item = sio.loadmat(mat_path) arr.append(item['feat']) arr = np.concatenate(arr, axis=0) # arr_h = sio.loadmat(os.path.join(data_dir, sub, video, seq + '.mat')) # arr = np.vstack([arr, arr_h['feat']]) if arr.shape[0] < K: print(sub + '_' + video) continue lst_probe_video.append([idx, arr]) lst_video_no.append(sub + '_' + video) # print('min proble seq len {}'.format(min_len)) save(lst_probe_video, os.path.join(save_dir, 'test_video_{}.bin'.format(num_sub))) print('finished') def get_close_set_ori(): num_sub = 100 data_dir = '/home/maoyirong/data/Celebrity-1000/resnet50_128_feat' save_dir = './data/Celeb/close' data_path = './data/Celeb/close/sub_video_seq_{}'.format(num_sub) lst_seq = sio.loadmat(data_path) gallery = lst_seq['gallery'] probe = lst_seq['probe'] print('gallery and probe size {} {}'.format(gallery.shape[0], probe.shape[0])) subs = [] sub_faces = [] for i in range(gallery.shape[0]): sub = fill_str(gallery[i, 0]) video = fill_str(gallery[i, 1]) seq = str(gallery[i, 2]) sub_exist = True if subs.count(sub) == 0: subs.append(sub) sub_faces.append([]) sub_exist = False idx = subs.index(sub) mat_path = os.path.join(data_dir, sub, video, seq + '_h.mat') if os.path.exists(mat_path): arr = sio.loadmat(mat_path) arr = arr['feat'] arr_h = sio.loadmat(os.path.join(data_dir, sub, video, seq + '.mat')) arr = np.vstack([arr, arr_h['feat']]) if len(arr) == 0: print(mat_path) continue if len(sub_faces[idx]) == 0: sub_faces[idx] = arr else: sub_faces[idx] = np.vstack([sub_faces[idx], arr]) lst_subs = [] lst_sub_faces = [] min_len = 100000 for i in range(len(subs)): if len(sub_faces[i]) > 0: lst_sub_faces.append(sub_faces[i]) lst_subs.append(subs[i]) if np.size(sub_faces[i],0) < min_len: min_len = np.size(sub_faces[i],0) print('min gallery sub faces {}'.format(min_len)) save(lst_sub_faces, os.path.join(save_dir, 
'train_{}.bin'.format(num_sub))) ## probe min_len = 100000 lst_probe_seqs = [] for i in range(probe.shape[0]): sub = fill_str(probe[i, 0]) video = fill_str(probe[i, 1]) seq = str(probe[i, 2]) if lst_subs.count(sub) == 0: continue idx = lst_subs.index(sub) mat_path = os.path.join(data_dir, sub, video, seq + '_h.mat') if os.path.exists(mat_path): arr = sio.loadmat(mat_path) arr = arr['feat'] arr_h = sio.loadmat(os.path.join(data_dir, sub, video, seq + '.mat')) arr = np.vstack([arr, arr_h['feat']]) if len(arr) == 0: print(mat_path) continue if np.size(arr, 0) < min_len: min_len = np.size(arr, 0) lst_probe_seqs.append([idx, arr]) print('min proble seq len {}'.format(min_len)) save(lst_probe_seqs, os.path.join(save_dir, 'test_{}.bin'.format(num_sub))) print('finished') if __name__ == '__main__': get_close_set()
31.331633
114
0.543071
876
6,141
3.599315
0.109589
0.071043
0.041231
0.039962
0.837615
0.831272
0.80019
0.778624
0.778624
0.768474
0
0.021949
0.310047
6,141
195
115
31.492308
0.722209
0.048689
0
0.730263
0
0
0.085577
0.030184
0
0
0
0
0
1
0.026316
false
0
0.026316
0
0.059211
0.072368
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
5b8b3e5a6b83e72b3f9769ef8bd0e517f7979809
1,740
py
Python
2014.d/bts.d/sts1-et.d/1-complexe.d/python/1_complexes-ie-1-correction.py
homeostasie/annees-precedentes
db95e1883558eb5f8c67dfdd9923cf00cdf1a70a
[ "MIT" ]
1
2018-12-29T12:46:51.000Z
2018-12-29T12:46:51.000Z
2014.d/bts.d/sts1-et.d/1-complexe.d/python/1_complexes-ie-1-correction.py
homeostasie/annees-precedentes
db95e1883558eb5f8c67dfdd9923cf00cdf1a70a
[ "MIT" ]
null
null
null
2014.d/bts.d/sts1-et.d/1-complexe.d/python/1_complexes-ie-1-correction.py
homeostasie/annees-precedentes
db95e1883558eb5f8c67dfdd9923cf00cdf1a70a
[ "MIT" ]
null
null
null
#!/usr/bin/env python # Pour les complexes, on définit j comme le complexe tel que j² = -1 j=complex(0,1) ; # Série A - Données print(" -------------------------- SÉRIE A ----") print(" ----- Données -----") z0 = 2 + 0*j z1 = 3*j z2 = j + 1 z3 = 3*(j + 1) z4 = -2*j z5 = -j + 1 z6 = -j +2 z7 = 5 + 0*j z8 = -2*j z9 = 2 + j # Affichage des données print("z0 = ",z0) print("z1 = ",z1) print("z2 = ",z2) print("z3 = ",z3) print("z4 = ",z4) print("z5 = ",z5) print("z6 = ",z6) print("z7 = ",z7) print("z8 = ",z8) print("z9 = ",z9) # Calcul print(" ----- Calculs -----") za = z1 + z2 zb = z0 - z3 + z2 zc = z6 + z1 +z4 zd = z7 * z2 ze = z2 + z7 * z2 zf = z2**2 zg = (z1 + z2)**2 zh = 1/z1 zi = z1/z5 zj = z9/z2 # Affichage des données print("za = ",za) print("zb = ",zb) print("zc = ",zc) print("zd = ",zd) print("ze = ",ze) print("zf = ",zf) print("zg = ",zg) print("zh = ",zh) print("zi = ",zi) print("zj = ",zj) # Série B - Données print(" -------------------------- SÉRIE B -----") print(" ----- Données -----") z0 = 1 + 0*j z1 = 2*j z2 = j + 1 z3 = 2*(j + 1) z4 = -4*j z5 = -j - 1 z6 = -j + 2 z7 = 6 + 0*j z8 = -5*j z9 = 2 + j # Affichage des données print("z0 = ",z0) print("z1 = ",z1) print("z2 = ",z2) print("z3 = ",z3) print("z4 = ",z4) print("z5 = ",z5) print("z6 = ",z6) print("z7 = ",z7) print("z8 = ",z8) print("z9 = ",z9) # Calcul print(" ----- Calculs -----") za = z1 + z2 zb = z0 - z3 + z2 zc = z6 + z1 +z4 zd = z7 * z2 ze = z2 + z7 * z2 zf = z2**2 zg = (z1 + z2)**2 zh = 1/z1 zi = z1/z5 zj = z9/z2 # Affichage des données print("za = ",za) print("zb = ",zb) print("zc = ",zc) print("zd = ",zd) print("ze = ",ze) print("zf = ",zf) print("zg = ",zg) print("zh = ",zh) print("zi = ",zi) print("zj = ",zj)
14.262295
68
0.479885
309
1,740
2.702265
0.171521
0.086228
0.091018
0.11497
0.776048
0.759281
0.759281
0.759281
0.732934
0.732934
0
0.103139
0.231034
1,740
121
69
14.380165
0.520927
0.12931
0
0.804598
0
0
0.239867
0.034552
0
0
0
0
0
1
0
false
0
0
0
0
0.528736
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
7
5b9425964b2951bca7febbb95784cc81853f1a74
214
py
Python
alimd/__init__.py
aoguedao/alimd
984142370eb640556561d2f27bfcedf5b4e2dffa
[ "MIT" ]
null
null
null
alimd/__init__.py
aoguedao/alimd
984142370eb640556561d2f27bfcedf5b4e2dffa
[ "MIT" ]
null
null
null
alimd/__init__.py
aoguedao/alimd
984142370eb640556561d2f27bfcedf5b4e2dffa
[ "MIT" ]
null
null
null
# import logging from alimd.constants import * from alimd.utils import * from alimd import gmanova from alimd import simce from alimd import eigen # logging.getLogger(__name__).addHandler(logging.NullHandler())
23.777778
63
0.799065
28
214
5.964286
0.464286
0.269461
0.269461
0
0
0
0
0
0
0
0
0
0.130841
214
9
63
23.777778
0.897849
0.35514
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
5b9dfbfc09e2391f7967be4312af7b717c6eebc2
17,926
py
Python
tests/extract_old_site/modules/test_references.py
aychen99/Excavating-Occaneechi-Town
6e864ca69ff1881554eb4c88aebed236bafbeaf4
[ "MIT" ]
1
2020-10-01T01:07:11.000Z
2020-10-01T01:07:11.000Z
tests/extract_old_site/modules/test_references.py
aychen99/Excavating-Occaneechi-Town
6e864ca69ff1881554eb4c88aebed236bafbeaf4
[ "MIT" ]
null
null
null
tests/extract_old_site/modules/test_references.py
aychen99/Excavating-Occaneechi-Town
6e864ca69ff1881554eb4c88aebed236bafbeaf4
[ "MIT" ]
null
null
null
"""Tests for the references-page extractor in
``src.extract_old_site.modules.references``.

Exercises ``refs.extract_references_page`` on three sample reference pages
and ``refs.extract_all_references`` on all of them combined (via a mocked
file reader).
"""
from src.extract_old_site.modules import references as refs
import pytest
from pathlib import Path  # NOTE(review): unused here; kept — removing imports is out of scope.

# Portions of /dig/html/split/report282b.html, references pg. 1.
report_282b_sample_html_str = """ <html><body bgcolor=white> <a name="ku">Abler, T. S., and Elisabeth Tooker</a><blockquote> 1978 Seneca. In <i>Handbook of North American Indians</i>, vol. 15, edited by Bruce G. Trigger. Smithsonian Institution, Washington, D.C.<p> </blockquote><a name="go">Gwynn, John V.</a><blockquote> 1964 <i>Virginia Upland Game Investigations: Restoration of the Wild Turkey</i>. Annual Report, Virginia Pittman-Robertson Project.<p> </blockquote></body></html> """

# Portions of /dig/html/split/report283b.html, references pg. 2.
# Chosen to contain examples of references where the author has no letters
# but one of the references does,
# where an author has multiple references with letters,
# where an author has multiple references without letters,
# where an author has a single reference with letters,
# and where an author has a single reference without letters.
report_283b_sample_html_str = """ <html><body bgcolor=white> <a name="bs">Hale, Horatio</a><blockquote> 1883 The Tutelo Tribe and Language. <i>Proceedings of the American Philosophical Society</i> 21:114:1-47.<p> </blockquote>Hammett, Julia E.<blockquote> 1983 Preliminary Classification of North Carolina Shell Bead Artifacts: Some Indications and Implications. Ms. on file, Research Laboratories of Anthropology, University of North Carolina, Chapel Hill.<p> 1987 Shell Artifacts from the Carolina Piedmont. In <i>The Siouan Project: Seasons I and II</i>, edited by Roy S. Dickens Jr., H. Trawick Ward, and R. P. Stephen Davis, Jr., pp. 167-183. Monograph Series No. 1. Research Laboratories of Anthropology, University of North Carolina, Chapel Hill.<p> </blockquote><a name="iu">Horn, Henry S.</a><blockquote> 1974 The Ecology of Secondary Succession. <i>Annual Review of Ecology and Systematics</i> 5:25-37.<p> 1978 Optimal Tactics of Reproduction and Life-History. In <i>Behavioral Ecology: An Evolutionary Approach</i>, edited by J. R. Krebs and N. B. Davies, pp 411-429. Blackwell Scientific Publications, Oxford.<p> </blockquote><a name="cx">Hudson, Charles M.</a><blockquote> 1970 <i>The Catawba Nation</i>. University of Georgia Press, Athens.<p> <a name="bu">1976</a> <i>The Southeastern Indians</i>. The University of Tennessee Press, Knoxville.<p> </blockquote>Morris, Percy C.<blockquote> 1975 <i>A Field Guide to Shells of the Atlantic and Gulf Coasts and the West Indies</i>. Houghton Mifflin Co., Boston.<p> </blockquote><a name="ax">Morrison, A. J.</a><blockquote> 1921 The Virginia Indian Trade to 1673. <i>William and Mary Quarterly</i> (2nd ser.) 1:217-236.<p> </blockquote>Myers, Albert Cook (editor)<blockquote> 1970 <i>William Penn's Own Account of the Lenni Lenape or Delaware Indians</i>. The Middle Atlantic Press, Somerset.<p> </blockquote>Petherick, Gary L.<blockquote> 1985 Architecture and Features at the Fredricks, Wall, and Mitchum Sites. In <i>The Historic Occaneechi: An Archaeological Investigation of Culture Change. Final Report of 1984 Investigations</i>, edited by Roy S. Dickens, Jr., H. Trawick Ward, and R. P. Stephen Davis, Jr., pp. 53-178. Research Laboratories of Anthropology, University of North Carolina, Chapel Hill.<p> <a name="lk">1987</a> Architecture and Features at the Fredricks, Wall, and Mitchum Sites. In <i>The Siouan Project: Seasons I and II</i>, edited by Roy S. Dickens, Jr., H. Trawick Ward and R. P. Stephen Davis, Jr., pp. 29-80. Monograph Series No. 1, Research Laboratories of Anthropology, University of North Carolina, Chapel Hill.<p> </blockquote><a name="dy">Rush County Clerk of Courts</a><blockquote> 1869 Court Papers: Jefries vs. O'Brien case of 1869. Rushville, Indiana.<p> </blockquote></body></html> """

# Portions of /dig/html/split/report284b.html, references pg. 3
report_284b_sample_html_str = """ <html><body bgcolor=white> <a name="ky">Sahlins, Marshall D.</a><blockquote> 1968 <i>Tribesmen</i>. Prentice-Hall, Inc., Englewood Cliffs, New Jersey.<p> </blockquote>Sainesbury, W. N. (editor)<blockquote> 1893 Calendar of State Papers, Colonial Series, America and the West Indies, 1669-1674. Printed for Her Majesty's Stationery Office by Eyre and Spottiswoods, London.<p> </blockquote><a name="hn">Yarnell, Richard A., and M. Jean Black</a><blockquote> 1983 Temporal Trends Indicated by a Survey of Prehistoric Plant Food Remains from Southeastern North America. Revised version of a paper presented at the 40th Annual Meeting of the Southeastern Archaeological Conference, Columbia, South Carolina.<p> <a name="iz">1985</a> Temporal Trends Indicated by a Survey of Archaic and Woodland Plant Food Remains from Southeastern North America. <i>Southeastern Archaeology</i> 4:93-106.<p> </blockquote></body></html> """

# Expected extraction for report282b: per-author reference strings plus a
# mapping from each <a name="…"> anchor to its (author, reference index).
report_282b_extracted = {
    "refs": {
        "Abler, T. S., and Elisabeth Tooker": [
            ("1978 Seneca. In <i>Handbook of North American Indians</i>, vol. 15, edited "
             "by Bruce G. Trigger. Smithsonian Institution, Washington, D.C.")
        ],
        "Gwynn, John V.": [
            ("1964 <i>Virginia Upland Game Investigations: Restoration of the Wild "
             "Turkey</i>. Annual Report, Virginia Pittman-Robertson Project.")
        ]
    },
    "hrefsToRefs": {
        "ku": {"author": "Abler, T. S., and Elisabeth Tooker", "refNum": 0},
        "go": {"author": "Gwynn, John V.", "refNum": 0}
    }
}

# Expected extraction for report283b (same structure as above).
report_283b_extracted = {
    "refs": {
        "Hale, Horatio": [
            ("1883 The Tutelo Tribe and Language. "
             "<i>Proceedings of the American Philosophical Society</i> 21:114:1-47.")
        ],
        "Hammett, Julia E.": [
            ("1983 Preliminary Classification of North Carolina Shell Bead Artifacts: "
             "Some Indications and Implications. Ms. on file, Research Laboratories "
             "of Anthropology, University of North Carolina, Chapel Hill."),
            ("1987 Shell Artifacts from the Carolina Piedmont. In <i>The Siouan Project: "
             "Seasons I and II</i>, edited by Roy S. Dickens Jr., H. Trawick Ward, and R. P. "
             "Stephen Davis, Jr., pp. 167-183. Monograph Series No. 1. Research "
             "Laboratories of Anthropology, University of North Carolina, Chapel Hill.")
        ],
        "Horn, Henry S.": [
            ("1974 The Ecology of Secondary Succession. <i>Annual Review of Ecology and "
             "Systematics</i> 5:25-37."),
            ("1978 Optimal Tactics of Reproduction and Life-History. In <i>Behavioral "
             "Ecology: An Evolutionary Approach</i>, edited by J. R. Krebs and N. B. Davies, "
             "pp 411-429. Blackwell Scientific Publications, Oxford.")
        ],
        "Hudson, Charles M.": [
            ("1970 <i>The Catawba Nation</i>. University of Georgia Press, Athens."),
            ("1976 <i>The Southeastern Indians</i>. The University of Tennessee Press, "
             "Knoxville.")
        ],
        "Morris, Percy C.": [
            ("1975 <i>A Field Guide to Shells of the Atlantic and Gulf Coasts and the "
             "West Indies</i>. Houghton Mifflin Co., Boston.")
        ],
        "Morrison, A. J.": [
            ("1921 The Virginia Indian Trade to 1673. <i>William and Mary Quarterly</i> "
             "(2nd ser.) 1:217-236.")
        ],
        "Myers, Albert Cook (editor)": [
            ("1970 <i>William Penn's Own Account of the Lenni Lenape or Delaware "
             "Indians</i>. The Middle Atlantic Press, Somerset.")
        ],
        "Petherick, Gary L.": [
            ("1985 Architecture and Features at the Fredricks, Wall, and Mitchum Sites. "
             "In <i>The Historic Occaneechi: An Archaeological Investigation of Culture "
             "Change. Final Report of 1984 Investigations</i>, edited by Roy S. Dickens, "
             "Jr., H. Trawick Ward, and R. P. Stephen Davis, Jr., pp. 53-178. Research "
             "Laboratories of Anthropology, University of North Carolina, Chapel Hill."),
            ("1987 Architecture and Features at the Fredricks, Wall, and Mitchum Sites. "
             "In <i>The Siouan Project: Seasons I and II</i>, edited by Roy S. Dickens, Jr., "
             "H. Trawick Ward and R. P. Stephen Davis, Jr., pp. 29-80. Monograph Series No. "
             "1, Research Laboratories of Anthropology, University of North Carolina, Chapel "
             "Hill.")
        ],
        "Rush County Clerk of Courts": [
            ("1869 Court Papers: Jefries vs. O'Brien case of 1869. Rushville, Indiana.")
        ]
    },
    "hrefsToRefs": {
        "bs": {"author": "Hale, Horatio", "refNum": 0},
        "iu": {"author": "Horn, Henry S.", "refNum": 0},
        "cx": {"author": "Hudson, Charles M.", "refNum": 0},
        "bu": {"author": "Hudson, Charles M.", "refNum": 1},
        "ax": {"author": "Morrison, A. J.", "refNum": 0},
        "lk": {"author": "Petherick, Gary L.", "refNum": 1},
        "dy": {"author": "Rush County Clerk of Courts", "refNum": 0}
    }
}

# Expected extraction for report284b (same structure as above).
report_284b_extracted = {
    "refs": {
        "Sahlins, Marshall D.": [
            ("1968 <i>Tribesmen</i>. Prentice-Hall, Inc., Englewood Cliffs, New "
             "Jersey.")
        ],
        "Sainesbury, W. N. (editor)": [
            ("1893 Calendar of State Papers, Colonial Series, America and the West "
             "Indies, 1669-1674. Printed for Her Majesty's Stationery Office by Eyre and "
             "Spottiswoods, London.")
        ],
        "Yarnell, Richard A., and M. Jean Black": [
            ("1983 Temporal Trends Indicated by a Survey of Prehistoric Plant Food "
             "Remains from Southeastern North America. Revised version of a paper presented "
             "at the 40th Annual Meeting of the Southeastern Archaeological Conference, "
             "Columbia, South Carolina."),
            ("1985 Temporal Trends Indicated by a Survey of Archaic and Woodland Plant "
             "Food Remains from Southeastern North America. <i>Southeastern Archaeology</i> "
             "4:93-106.")
        ]
    },
    "hrefsToRefs": {
        "ky": {"author": "Sahlins, Marshall D.", "refNum": 0},
        "hn": {"author": "Yarnell, Richard A., and M. Jean Black", "refNum": 0},
        "iz": {"author": "Yarnell, Richard A., and M. Jean Black", "refNum": 1}
    }
}

# Union of all three expected extractions — what extract_all_references
# should return when given all three sample pages.
all_sample_refs_extracted = {
    "refs": {
        "Abler, T. S., and Elisabeth Tooker": [
            ("1978 Seneca. In <i>Handbook of North American Indians</i>, vol. 15, edited "
             "by Bruce G. Trigger. Smithsonian Institution, Washington, D.C.")
        ],
        "Gwynn, John V.": [
            ("1964 <i>Virginia Upland Game Investigations: Restoration of the Wild "
             "Turkey</i>. Annual Report, Virginia Pittman-Robertson Project.")
        ],
        "Hale, Horatio": [
            ("1883 The Tutelo Tribe and Language. "
             "<i>Proceedings of the American Philosophical Society</i> 21:114:1-47.")
        ],
        "Hammett, Julia E.": [
            ("1983 Preliminary Classification of North Carolina Shell Bead Artifacts: "
             "Some Indications and Implications. Ms. on file, Research Laboratories "
             "of Anthropology, University of North Carolina, Chapel Hill."),
            ("1987 Shell Artifacts from the Carolina Piedmont. In <i>The Siouan Project: "
             "Seasons I and II</i>, edited by Roy S. Dickens Jr., H. Trawick Ward, and R. P. "
             "Stephen Davis, Jr., pp. 167-183. Monograph Series No. 1. Research "
             "Laboratories of Anthropology, University of North Carolina, Chapel Hill.")
        ],
        "Horn, Henry S.": [
            ("1974 The Ecology of Secondary Succession. <i>Annual Review of Ecology and "
             "Systematics</i> 5:25-37."),
            ("1978 Optimal Tactics of Reproduction and Life-History. In <i>Behavioral "
             "Ecology: An Evolutionary Approach</i>, edited by J. R. Krebs and N. B. Davies, "
             "pp 411-429. Blackwell Scientific Publications, Oxford.")
        ],
        "Hudson, Charles M.": [
            ("1970 <i>The Catawba Nation</i>. University of Georgia Press, Athens."),
            ("1976 <i>The Southeastern Indians</i>. The University of Tennessee Press, "
             "Knoxville.")
        ],
        "Morris, Percy C.": [
            ("1975 <i>A Field Guide to Shells of the Atlantic and Gulf Coasts and the "
             "West Indies</i>. Houghton Mifflin Co., Boston.")
        ],
        "Morrison, A. J.": [
            ("1921 The Virginia Indian Trade to 1673. <i>William and Mary Quarterly</i> "
             "(2nd ser.) 1:217-236.")
        ],
        "Myers, Albert Cook (editor)": [
            ("1970 <i>William Penn's Own Account of the Lenni Lenape or Delaware "
             "Indians</i>. The Middle Atlantic Press, Somerset.")
        ],
        "Petherick, Gary L.": [
            ("1985 Architecture and Features at the Fredricks, Wall, and Mitchum Sites. "
             "In <i>The Historic Occaneechi: An Archaeological Investigation of Culture "
             "Change. Final Report of 1984 Investigations</i>, edited by Roy S. Dickens, "
             "Jr., H. Trawick Ward, and R. P. Stephen Davis, Jr., pp. 53-178. Research "
             "Laboratories of Anthropology, University of North Carolina, Chapel Hill."),
            ("1987 Architecture and Features at the Fredricks, Wall, and Mitchum Sites. "
             "In <i>The Siouan Project: Seasons I and II</i>, edited by Roy S. Dickens, Jr., "
             "H. Trawick Ward and R. P. Stephen Davis, Jr., pp. 29-80. Monograph Series No. "
             "1, Research Laboratories of Anthropology, University of North Carolina, Chapel "
             "Hill.")
        ],
        "Rush County Clerk of Courts": [
            ("1869 Court Papers: Jefries vs. O'Brien case of 1869. Rushville, Indiana.")
        ],
        "Sahlins, Marshall D.": [
            ("1968 <i>Tribesmen</i>. Prentice-Hall, Inc., Englewood Cliffs, New "
             "Jersey.")
        ],
        "Sainesbury, W. N. (editor)": [
            ("1893 Calendar of State Papers, Colonial Series, America and the West "
             "Indies, 1669-1674. Printed for Her Majesty's Stationery Office by Eyre and "
             "Spottiswoods, London.")
        ],
        "Yarnell, Richard A., and M. Jean Black": [
            ("1983 Temporal Trends Indicated by a Survey of Prehistoric Plant Food "
             "Remains from Southeastern North America. Revised version of a paper presented "
             "at the 40th Annual Meeting of the Southeastern Archaeological Conference, "
             "Columbia, South Carolina."),
            ("1985 Temporal Trends Indicated by a Survey of Archaic and Woodland Plant "
             "Food Remains from Southeastern North America. <i>Southeastern Archaeology</i> "
             "4:93-106.")
        ]
    },
    "hrefsToRefs": {
        "ku": {"author": "Abler, T. S., and Elisabeth Tooker", "refNum": 0},
        "go": {"author": "Gwynn, John V.", "refNum": 0},
        "bs": {"author": "Hale, Horatio", "refNum": 0},
        "iu": {"author": "Horn, Henry S.", "refNum": 0},
        "cx": {"author": "Hudson, Charles M.", "refNum": 0},
        "bu": {"author": "Hudson, Charles M.", "refNum": 1},
        "ax": {"author": "Morrison, A. J.", "refNum": 0},
        "lk": {"author": "Petherick, Gary L.", "refNum": 1},
        "dy": {"author": "Rush County Clerk of Courts", "refNum": 0},
        "ky": {"author": "Sahlins, Marshall D.", "refNum": 0},
        "hn": {"author": "Yarnell, Richard A., and M. Jean Black", "refNum": 0},
        "iz": {"author": "Yarnell, Richard A., and M. Jean Black", "refNum": 1}
    }
}


def mock_readfile(filename, parent_dir_path_obj):
    """Stand-in for the project's file reader: serve the in-memory sample
    HTML fixtures above instead of reading the extracted site from disk."""
    if parent_dir_path_obj.as_posix() == "C:/dig/html/part6":
        pass  # presumably a placeholder for other fixtures — nothing served from part6 here
    if parent_dir_path_obj.as_posix() == "C:/dig/html/split":
        if filename == "report282b.html":
            return report_282b_sample_html_str
        elif filename == "report283b.html":
            return report_283b_sample_html_str
        elif filename == "report284b.html":
            return report_284b_sample_html_str
    raise Exception("failed to find file path in mock_readfile")


# Each sample page must extract to its expected refs/hrefsToRefs structure.
@pytest.mark.parametrize("ref_reportb_html_str,expected_results", [
    (report_282b_sample_html_str, report_282b_extracted),
    (report_283b_sample_html_str, report_283b_extracted),
    (report_284b_sample_html_str, report_284b_extracted)
])
def test_extract_references_page(ref_reportb_html_str, expected_results):
    assert refs.extract_references_page(ref_reportb_html_str) == expected_results


# Combining all pages (via the mocked reader) must yield the merged mapping.
def test_extract_all_references():
    assert refs.extract_all_references("C:/", mock_readfile) == all_sample_refs_extracted


# TODO
# def test_validate_ref_page():
#     pass

# def test_validate_all_ref_pages():
#     pass
44.371287
94
0.5974
2,202
17,926
4.818347
0.169391
0.007917
0.021206
0.038454
0.879359
0.855702
0.843921
0.83525
0.83525
0.806786
0
0.042137
0.283778
17,926
403
95
44.48139
0.784251
0.033694
0
0.648
0
0.050667
0.698041
0.015832
0
0
0
0.002481
0.005333
1
0.008
false
0.002667
0.008
0
0.024
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
7518dbe6b956dcc8a2243b655ed4c95942b34ab5
1,512
py
Python
record/cd/forms.py
Brayton-Han/Brayton-s-Record
67cb6f7b17d8cb2c5f428079afb091f12b015f5c
[ "MIT" ]
null
null
null
record/cd/forms.py
Brayton-Han/Brayton-s-Record
67cb6f7b17d8cb2c5f428079afb091f12b015f5c
[ "MIT" ]
null
null
null
record/cd/forms.py
Brayton-Han/Brayton-s-Record
67cb6f7b17d8cb2c5f428079afb091f12b015f5c
[ "MIT" ]
null
null
null
from collections import defaultdict  # NOTE(review): unused in this module — TODO confirm no other module re-imports it from here before removing

from django import forms


# Django assembles a form's fields in declaration order, with base-class
# fields preceding subclass fields, so these shared bases preserve the exact
# field order of the original duplicated declarations.

class QueryBaseForm(forms.Form):
    """Search fields common to the CD and vinyl query forms (all optional)."""
    name = forms.CharField(max_length=128, required=False)
    artist = forms.CharField(max_length=128, required=False)


class QuerycdForm(QueryBaseForm):
    """Search filter for CDs: common fields plus CD-only flags."""
    explicit = forms.BooleanField(required=False)
    seal_off = forms.BooleanField(required=False)


class QueryvinylForm(QueryBaseForm):
    """Search filter for vinyl records: common fields plus the vinyl-only flag."""
    second_hand = forms.BooleanField(required=False)


class EditBaseForm(forms.Form):
    """Fields common to both edit forms.

    barcode/name/produce_area/price/cost are required; the rest are optional.
    """
    barcode = forms.CharField(max_length=13)  # max_length=13 — presumably EAN-13, TODO confirm
    name = forms.CharField(max_length=128)
    artist = forms.CharField(max_length=128, required=False)
    number = forms.IntegerField(required=False)
    genre = forms.CharField(max_length=128, required=False)
    produce_area = forms.CharField(max_length=128)
    price = forms.FloatField()
    cost = forms.FloatField()


class EditcdForm(EditBaseForm):
    """Create/edit form for a CD: common fields plus CD-only flags."""
    seal_off = forms.BooleanField(required=False)
    explicit = forms.BooleanField(required=False)


class EditvinylForm(EditBaseForm):
    """Create/edit form for a vinyl record: common fields plus the vinyl-only flag."""
    second_hand = forms.BooleanField(required=False)
42
61
0.733466
182
1,512
5.983516
0.197802
0.191001
0.218549
0.295684
0.902663
0.888889
0.759412
0.677686
0.677686
0.677686
0
0.031571
0.162037
1,512
36
62
42
0.82794
0
0
0.8125
0
0
0
0
0
0
0
0
0
1
0
false
0
0.0625
0
1
0
0
0
0
null
0
1
1
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
8
7526e258d61d7289130f394b1c9a8ccb2b84a516
68
py
Python
libapp/sitemap/__init__.py
octopi-labs/bazzinga
fcdd8e925cbaa31cacbec10dd1d599e72b84c588
[ "MIT" ]
null
null
null
libapp/sitemap/__init__.py
octopi-labs/bazzinga
fcdd8e925cbaa31cacbec10dd1d599e72b84c588
[ "MIT" ]
3
2021-03-20T00:43:11.000Z
2022-01-06T22:33:23.000Z
libapp/sitemap/__init__.py
octopi-labs/bazzinga
fcdd8e925cbaa31cacbec10dd1d599e72b84c588
[ "MIT" ]
null
null
null
"""Package root for ``libapp.sitemap``: makes the ``models`` and
``sitemap`` submodules importable directly from the package."""
from libapp.sitemap import models
from libapp.sitemap import sitemap
34
34
0.867647
10
68
5.9
0.5
0.338983
0.576271
0.779661
0
0
0
0
0
0
0
0
0.102941
68
2
34
34
0.967213
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8