hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
919ce489e3336c2de064e12b4e8a7140d18128c6
| 481
|
py
|
Python
|
Chapter6/Ex6.3.py
|
iliankostadinov/thinkpython
|
767223dbcd8efd7ab309a74b58b62fe288b84225
|
[
"Apache-2.0"
] | 1
|
2018-12-08T16:18:05.000Z
|
2018-12-08T16:18:05.000Z
|
Chapter6/Ex6.3.py
|
iliankostadinov/thinkpython
|
767223dbcd8efd7ab309a74b58b62fe288b84225
|
[
"Apache-2.0"
] | null | null | null |
Chapter6/Ex6.3.py
|
iliankostadinov/thinkpython
|
767223dbcd8efd7ab309a74b58b62fe288b84225
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# Check if entered string is palindrome
def first(word):
return word[0]
def last(word):
return word[-1]
def middle(word):
return word[1:-1]
def is_palindrome(word):
if len(word) == 2 and first(word) == last(word):
return True
if len(word) == 3 and first(word) == last(word):
return True
if first(word) != last(word):
return False
return is_palindrome(middle(word))
print(is_palindrome('nooxn'))
| 16.586207
| 52
| 0.623701
| 71
| 481
| 4.183099
| 0.366197
| 0.20202
| 0.188552
| 0.171717
| 0.292929
| 0.215488
| 0.215488
| 0.215488
| 0
| 0
| 0
| 0.019126
| 0.239085
| 481
| 28
| 53
| 17.178571
| 0.79235
| 0.122661
| 0
| 0.133333
| 0
| 0
| 0.011905
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.266667
| false
| 0
| 0
| 0.2
| 0.733333
| 0.066667
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
91a9238d1c6ed56c7617f43a0e346def12b4b2d6
| 349
|
py
|
Python
|
exprimo/optimizers/__init__.py
|
Lagostra/exprimo
|
41c4142dede579b0e4dc4e590af013785eae4bc2
|
[
"MIT"
] | 3
|
2020-03-30T20:54:17.000Z
|
2022-01-18T17:06:02.000Z
|
exprimo/optimizers/__init__.py
|
Lagostra/exprimo
|
41c4142dede579b0e4dc4e590af013785eae4bc2
|
[
"MIT"
] | null | null | null |
exprimo/optimizers/__init__.py
|
Lagostra/exprimo
|
41c4142dede579b0e4dc4e590af013785eae4bc2
|
[
"MIT"
] | null | null | null |
from .hill_climbing import HillClimbingOptimizer, RandomHillClimbingOptimizer
from .linear_search import LinearSearchOptimizer
from .simulated_annealing import SimulatedAnnealingOptimizer, exponential_multiplicative_decay, \
ScipySimulatedAnnealingOptimizer
from .genetic_algorithm import GAOptimizer
from .map_elites import MapElitesOptimizer
| 43.625
| 97
| 0.891117
| 30
| 349
| 10.133333
| 0.733333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083095
| 349
| 7
| 98
| 49.857143
| 0.95
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.833333
| 0
| 0.833333
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
37df9a9899733ab8f073ba0668dc7c4deb8cbd6b
| 9,296
|
py
|
Python
|
google-datacatalog-rdbms-connector/tests/google/datacatalog_connectors/rdbms/prepare/datacatalog_tag_factory_test.py
|
brucearctor/datacatalog-connectors-rdbms
|
7ff5dc858ea7aa21486343304fc281692480cdb8
|
[
"Apache-2.0"
] | 46
|
2020-04-27T21:55:50.000Z
|
2022-02-06T04:34:06.000Z
|
google-datacatalog-rdbms-connector/tests/google/datacatalog_connectors/rdbms/prepare/datacatalog_tag_factory_test.py
|
brucearctor/datacatalog-connectors-rdbms
|
7ff5dc858ea7aa21486343304fc281692480cdb8
|
[
"Apache-2.0"
] | 45
|
2020-05-20T21:09:04.000Z
|
2022-03-24T00:14:30.000Z
|
google-datacatalog-rdbms-connector/tests/google/datacatalog_connectors/rdbms/prepare/datacatalog_tag_factory_test.py
|
brucearctor/datacatalog-connectors-rdbms
|
7ff5dc858ea7aa21486343304fc281692480cdb8
|
[
"Apache-2.0"
] | 47
|
2020-05-02T14:48:06.000Z
|
2022-03-28T22:12:22.000Z
|
#!/usr/bin/python
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import unittest
from google.datacatalog_connectors.commons_test import utils
from google.datacatalog_connectors.rdbms.prepare import \
datacatalog_tag_factory
from google.cloud import datacatalog
class DataCatalogTagFactoryTest(unittest.TestCase):
__MODULE_PATH = os.path.dirname(os.path.abspath(__file__))
def test_tag_for_table_container_should_set_all_available_fields( # noqa:E501
self): # noqa:E125
metadata_def = utils.Utils.get_metadata_def_obj(self.__MODULE_PATH)
factory = datacatalog_tag_factory.DataCatalogTagFactory(metadata_def)
tag_template = datacatalog.TagTemplate()
tag_template.name = 'template_name'
schema_dict = {
'creator': 'creator_test',
'owner': 'owner_test',
'update_user': 'update_user_test',
'tables': [{}, {}]
}
tag = factory. \
make_tag_for_table_container_metadata(tag_template, schema_dict)
self.assertEqual(2, tag.fields['num_tables'].double_value)
self.assertEqual('creator_test',
tag.fields['schema_creator'].string_value)
self.assertEqual('owner_test', tag.fields['schema_owner'].string_value)
self.assertEqual('update_user_test',
tag.fields['schema_update_user'].string_value)
def test_tag_for_table_container_missing_fields_should_succeed( # noqa:E501
self): # noqa:E125
metadata_def = utils.Utils.get_metadata_def_obj(self.__MODULE_PATH)
factory = datacatalog_tag_factory.DataCatalogTagFactory(metadata_def)
tag_template = datacatalog.TagTemplate()
tag_template.name = 'template_name'
schema_dict = {'tables': [{}, {}]}
tag = factory. \
make_tag_for_table_container_metadata(tag_template, schema_dict)
self.assertEqual(2, tag.fields['num_tables'].double_value)
def test_make_tag_for_table_metadata_should_set_all_available_fields(
self): # noqa:E125
metadata_def = utils.Utils.get_metadata_def_obj(self.__MODULE_PATH)
factory = datacatalog_tag_factory.DataCatalogTagFactory(metadata_def)
tag_template = datacatalog.TagTemplate()
tag_template.name = 'template_name'
tables_dict = {
'creator': 'creator_test',
'owner': 'owner_test',
'update_user': 'update_user_test',
'num_rows': 5,
'table_size_MB': 2.0,
'table_type': 'ROW',
'has_primary_key': 'TRUE'
}
tag = factory. \
make_tag_for_table_metadata(tag_template, tables_dict, 'schema')
self.assertEqual('schema', tag.fields['schema_name'].string_value)
self.assertEqual('creator_test',
tag.fields['table_creator'].string_value)
self.assertEqual('owner_test', tag.fields['table_owner'].string_value)
self.assertEqual('update_user_test',
tag.fields['table_update_user'].string_value)
self.assertEqual(5, tag.fields['num_rows'].double_value)
self.assertEqual(2.0, tag.fields['table_size_MB'].double_value)
self.assertEqual('ROW', tag.fields['table_type'].string_value)
self.assertEqual(True, tag.fields['has_primary_key'].bool_value)
def test_tag_for_table_nan_values_should_set_all_available_fields( # noqa:E501
self): # noqa:E125
metadata_def = utils.Utils.get_metadata_def_obj(self.__MODULE_PATH)
factory = datacatalog_tag_factory.DataCatalogTagFactory(metadata_def)
tag_template = datacatalog.TagTemplate()
tag_template.name = 'template_name'
tables_dict = {
'creator': 'creator_test',
'owner': 'owner_test',
'update_user': 'update_user_test',
'num_rows': float('nan'),
'table_size_MB': float('nan')
}
tag = factory. \
make_tag_for_table_metadata(tag_template, tables_dict, 'schema')
self.assertEqual('schema', tag.fields['schema_name'].string_value)
self.assertEqual('creator_test',
tag.fields['table_creator'].string_value)
self.assertEqual('owner_test', tag.fields['table_owner'].string_value)
self.assertEqual('update_user_test',
tag.fields['table_update_user'].string_value)
self.assertEqual(0, tag.fields['num_rows'].double_value)
self.assertEqual(0, tag.fields['table_size_MB'].double_value)
def test_tag_for_table_zero_values_should_set_all_available_fields( # noqa:E501
self): # noqa:E125
metadata_def = utils.Utils.get_metadata_def_obj(self.__MODULE_PATH)
factory = datacatalog_tag_factory.DataCatalogTagFactory(metadata_def)
tag_template = datacatalog.TagTemplate()
tag_template.name = 'template_name'
tables_dict = {
'creator': 'creator_test',
'owner': 'owner_test',
'update_user': 'update_user_test',
'num_rows': 0,
'table_size_MB': 0
}
tag = factory. \
make_tag_for_table_metadata(tag_template, tables_dict, 'schema')
self.assertEqual('schema', tag.fields['schema_name'].string_value)
self.assertEqual('creator_test',
tag.fields['table_creator'].string_value)
self.assertEqual('owner_test', tag.fields['table_owner'].string_value)
self.assertEqual('update_user_test',
tag.fields['table_update_user'].string_value)
self.assertEqual(0, tag.fields['num_rows'].double_value)
self.assertEqual(0, tag.fields['table_size_MB'].double_value)
def test_make_tag_for_table_metadata_with_database_should_succeed(
self): # noqa:E125
metadata_def = utils.Utils.get_metadata_def_obj(self.__MODULE_PATH)
metadata_def['database_name'] = 'test_db'
factory = datacatalog_tag_factory. \
DataCatalogTagFactory(
metadata_def
)
tag_template = datacatalog.TagTemplate()
tag_template.name = 'template_name'
tables_dict = {
'creator': 'creator_test',
'owner': 'owner_test',
'update_user': 'update_user_test',
'num_rows': 5
}
tag = factory. \
make_tag_for_table_metadata(tag_template, tables_dict, 'schema')
self.assertEqual('test_db', tag.fields['database_name'].string_value)
self.assertEqual('schema', tag.fields['schema_name'].string_value)
self.assertEqual('creator_test',
tag.fields['table_creator'].string_value)
self.assertEqual('owner_test', tag.fields['table_owner'].string_value)
self.assertEqual('update_user_test',
tag.fields['table_update_user'].string_value)
self.assertEqual(5, tag.fields['num_rows'].double_value)
def test_make_tag_for_table_metadata_missing_fields_should_succeed(
self): # noqa:E125
metadata_def = utils.Utils.get_metadata_def_obj(self.__MODULE_PATH)
factory = datacatalog_tag_factory.DataCatalogTagFactory(metadata_def)
tag_template = datacatalog.TagTemplate()
tag_template.name = 'template_name'
tables_dict = {'num_rows': 5}
tag = factory. \
make_tag_for_table_metadata(tag_template, tables_dict, 'schema')
self.assertEqual('schema', tag.fields['schema_name'].string_value)
self.assertEqual(5, tag.fields['num_rows'].double_value)
def test_make_tags_for_columns_metadata_should_set_all_available_fields(
self): # noqa:E125
metadata_def = utils.Utils.get_metadata_def_obj(self.__MODULE_PATH)
factory = datacatalog_tag_factory.DataCatalogTagFactory(metadata_def)
tag_template = datacatalog.TagTemplate()
tag_template.name = 'template_name'
tables_dict = {
'columns': [{
'name': 'col_1',
'masked': 'TRUE',
'mask_expression': 'XXX-XXX-XXX'
}, {
'name': 'col_2',
'masked': 'FALSE'
}]
}
tags = factory. \
make_tags_for_columns_metadata(tag_template, tables_dict)
tag = tags[0]
self.assertEqual(True, tag.fields['masked'].bool_value)
self.assertEqual('XXX-XXX-XXX',
tag.fields['mask_expression'].string_value)
self.assertEqual('col_1', tag.column)
# Only 1 tag should be created.
self.assertEqual(1, len(tags))
| 39.223629
| 84
| 0.65028
| 1,066
| 9,296
| 5.301126
| 0.142589
| 0.098213
| 0.099098
| 0.101221
| 0.781101
| 0.757742
| 0.741285
| 0.735091
| 0.726774
| 0.706424
| 0
| 0.0097
| 0.245912
| 9,296
| 236
| 85
| 39.389831
| 0.796434
| 0.076915
| 0
| 0.633721
| 0
| 0
| 0.15515
| 0
| 0
| 0
| 0
| 0
| 0.215116
| 1
| 0.046512
| false
| 0
| 0.02907
| 0
| 0.087209
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
37e4b100bfe86809bdefb91a443a7650bb0e5083
| 66
|
py
|
Python
|
xpisign/__init__.py
|
mozilla/xpisign.py
|
99f36a53dfb84214af783fd6b3960136eeae3466
|
[
"CC0-1.0"
] | 13
|
2015-03-19T15:05:41.000Z
|
2021-10-16T01:28:23.000Z
|
xpisign/__init__.py
|
mozilla/xpisign.py
|
99f36a53dfb84214af783fd6b3960136eeae3466
|
[
"CC0-1.0"
] | 1
|
2017-01-02T14:00:40.000Z
|
2017-01-02T14:00:40.000Z
|
xpisign/__init__.py
|
nmaier/xpisign.py
|
3a7107ae08c49a9d49e65e0f5f038944bd6a85d2
|
[
"CC0-1.0"
] | 4
|
2015-08-28T23:34:28.000Z
|
2016-10-31T20:31:59.000Z
|
from .api import xpisign, __version__
from .compat import BytesIO
| 22
| 37
| 0.818182
| 9
| 66
| 5.555556
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 66
| 2
| 38
| 33
| 0.877193
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
532e4c39daa0198ab378ac136c782f4091aea582
| 64
|
py
|
Python
|
start.py
|
usc-isi-i2/dig-sandpaper
|
c7a905ceec28ad0cc9e7da7ede2fd3d2fc93c3d6
|
[
"MIT"
] | 4
|
2017-06-16T14:00:32.000Z
|
2021-02-20T07:16:54.000Z
|
start.py
|
usc-isi-i2/dig-sandpaper
|
c7a905ceec28ad0cc9e7da7ede2fd3d2fc93c3d6
|
[
"MIT"
] | 7
|
2017-06-24T23:03:31.000Z
|
2018-04-26T23:47:37.000Z
|
start.py
|
usc-isi-i2/dig-sandpaper
|
c7a905ceec28ad0cc9e7da7ede2fd3d2fc93c3d6
|
[
"MIT"
] | 3
|
2017-07-31T14:12:56.000Z
|
2021-02-20T07:16:55.000Z
|
from digsandpaper import main
import sys
main.main(sys.argv[1:])
| 21.333333
| 29
| 0.796875
| 11
| 64
| 4.636364
| 0.636364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017241
| 0.09375
| 64
| 3
| 30
| 21.333333
| 0.862069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
533dcc2129ff0b5f987c73779232cc54aa76e356
| 120
|
py
|
Python
|
test code/importExample.py
|
originaltype/typeProofer
|
589bb96c2c83405c3b2d7ad8622aa5e027e47ded
|
[
"MIT"
] | 12
|
2021-04-21T17:48:52.000Z
|
2022-03-30T01:20:52.000Z
|
test code/importExample.py
|
originaltype/typeProofer
|
589bb96c2c83405c3b2d7ad8622aa5e027e47ded
|
[
"MIT"
] | 1
|
2021-05-04T07:28:04.000Z
|
2021-05-04T20:10:13.000Z
|
test code/importExample.py
|
originaltype/typeProofer
|
589bb96c2c83405c3b2d7ad8622aa5e027e47ded
|
[
"MIT"
] | 2
|
2021-04-21T08:12:11.000Z
|
2021-04-21T17:51:04.000Z
|
from typeProofer import collectFilesPaths, initPage
print(collectFilesPaths)
print(collectFilesPaths('txt'))
initPage()
| 24
| 51
| 0.841667
| 11
| 120
| 9.181818
| 0.636364
| 0.435644
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 120
| 5
| 52
| 24
| 0.901786
| 0
| 0
| 0
| 0
| 0
| 0.024793
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
725e9b1bea10f8cc644e36a7b88364c44db9325b
| 5,787
|
gyp
|
Python
|
deps/open-zwave/libopenzwave.gyp
|
rushidesai/node-openzwave
|
5eb3cb9d1082e2a02ad6fb60f4b2fef6c8c37e08
|
[
"ISC"
] | 77
|
2015-01-07T22:37:30.000Z
|
2021-03-21T11:44:55.000Z
|
deps/open-zwave/libopenzwave.gyp
|
rushidesai/node-openzwave
|
5eb3cb9d1082e2a02ad6fb60f4b2fef6c8c37e08
|
[
"ISC"
] | 28
|
2015-01-08T00:12:45.000Z
|
2019-05-21T10:58:44.000Z
|
deps/open-zwave/libopenzwave.gyp
|
rushidesai/node-openzwave
|
5eb3cb9d1082e2a02ad6fb60f4b2fef6c8c37e08
|
[
"ISC"
] | 39
|
2015-01-07T01:10:24.000Z
|
2018-02-09T12:40:22.000Z
|
{
"targets": [
{
"target_name": "libopenzwave",
"product_prefix": "lib",
"type": "static_library",
"sources": [
"cpp/tinyxml/tinystr.cpp",
"cpp/tinyxml/tinyxml.cpp",
"cpp/tinyxml/tinyxmlerror.cpp",
"cpp/tinyxml/tinyxmlparser.cpp",
"cpp/src/aes/aes_modes.c",
"cpp/src/aes/aescrypt.c",
"cpp/src/aes/aeskey.c",
"cpp/src/aes/aestab.c",
"cpp/src/command_classes/Alarm.cpp",
"cpp/src/command_classes/ApplicationStatus.cpp",
"cpp/src/command_classes/Association.cpp",
"cpp/src/command_classes/AssociationCommandConfiguration.cpp",
"cpp/src/command_classes/Basic.cpp",
"cpp/src/command_classes/BasicWindowCovering.cpp",
"cpp/src/command_classes/Battery.cpp",
"cpp/src/command_classes/CRC16Encap.cpp",
"cpp/src/command_classes/ClimateControlSchedule.cpp",
"cpp/src/command_classes/Clock.cpp",
"cpp/src/command_classes/CommandClass.cpp",
"cpp/src/command_classes/CommandClasses.cpp",
"cpp/src/command_classes/Configuration.cpp",
"cpp/src/command_classes/ControllerReplication.cpp",
"cpp/src/command_classes/DoorLock.cpp",
"cpp/src/command_classes/DoorLockLogging.cpp",
"cpp/src/command_classes/EnergyProduction.cpp",
"cpp/src/command_classes/Hail.cpp",
"cpp/src/command_classes/Indicator.cpp",
"cpp/src/command_classes/Language.cpp",
"cpp/src/command_classes/Lock.cpp",
"cpp/src/command_classes/ManufacturerSpecific.cpp",
"cpp/src/command_classes/Meter.cpp",
"cpp/src/command_classes/MeterPulse.cpp",
"cpp/src/command_classes/MultiCmd.cpp",
"cpp/src/command_classes/MultiInstance.cpp",
"cpp/src/command_classes/MultiInstanceAssociation.cpp",
"cpp/src/command_classes/NoOperation.cpp",
"cpp/src/command_classes/NodeNaming.cpp",
"cpp/src/command_classes/Powerlevel.cpp",
"cpp/src/command_classes/Proprietary.cpp",
"cpp/src/command_classes/Protection.cpp",
"cpp/src/command_classes/SceneActivation.cpp",
"cpp/src/command_classes/Security.cpp",
"cpp/src/command_classes/SensorAlarm.cpp",
"cpp/src/command_classes/SensorBinary.cpp",
"cpp/src/command_classes/SensorMultilevel.cpp",
"cpp/src/command_classes/SwitchAll.cpp",
"cpp/src/command_classes/SwitchBinary.cpp",
"cpp/src/command_classes/SwitchMultilevel.cpp",
"cpp/src/command_classes/SwitchToggleBinary.cpp",
"cpp/src/command_classes/SwitchToggleMultilevel.cpp",
"cpp/src/command_classes/ThermostatFanMode.cpp",
"cpp/src/command_classes/ThermostatFanState.cpp",
"cpp/src/command_classes/ThermostatMode.cpp",
"cpp/src/command_classes/ThermostatOperatingState.cpp",
"cpp/src/command_classes/ThermostatSetpoint.cpp",
"cpp/src/command_classes/TimeParameters.cpp",
"cpp/src/command_classes/UserCode.cpp",
"cpp/src/command_classes/Version.cpp",
"cpp/src/command_classes/WakeUp.cpp",
"cpp/src/value_classes/Value.cpp",
"cpp/src/value_classes/ValueBool.cpp",
"cpp/src/value_classes/ValueButton.cpp",
"cpp/src/value_classes/ValueByte.cpp",
"cpp/src/value_classes/ValueDecimal.cpp",
"cpp/src/value_classes/ValueInt.cpp",
"cpp/src/value_classes/ValueList.cpp",
"cpp/src/value_classes/ValueRaw.cpp",
"cpp/src/value_classes/ValueSchedule.cpp",
"cpp/src/value_classes/ValueShort.cpp",
"cpp/src/value_classes/ValueStore.cpp",
"cpp/src/value_classes/ValueString.cpp",
"cpp/src/platform/Controller.cpp",
"cpp/src/platform/Event.cpp",
"cpp/src/platform/FileOps.cpp",
"cpp/src/platform/HidController.cpp",
"cpp/src/platform/Log.cpp",
"cpp/src/platform/Mutex.cpp",
"cpp/src/platform/SerialController.cpp",
"cpp/src/platform/Stream.cpp",
"cpp/src/platform/Thread.cpp",
"cpp/src/platform/TimeStamp.cpp",
"cpp/src/platform/Wait.cpp",
"cpp/src/platform/unix/EventImpl.cpp",
"cpp/src/platform/unix/FileOpsImpl.cpp",
"cpp/src/platform/unix/LogImpl.cpp",
"cpp/src/platform/unix/MutexImpl.cpp",
"cpp/src/platform/unix/SerialControllerImpl.cpp",
"cpp/src/platform/unix/ThreadImpl.cpp",
"cpp/src/platform/unix/TimeStampImpl.cpp",
"cpp/src/platform/unix/WaitImpl.cpp",
"cpp/src/Driver.cpp",
"cpp/src/Group.cpp",
"cpp/src/Manager.cpp",
"cpp/src/Msg.cpp",
"cpp/src/Node.cpp",
"cpp/src/Options.cpp",
"cpp/src/Scene.cpp",
"cpp/src/Utils.cpp",
"cpp/src/vers.cpp"
],
"include_dirs": [
"cpp/hidapi/hidapi",
"cpp/src",
"cpp/src/command_classes",
"cpp/src/platform",
"cpp/src/platform/unix",
"cpp/src/value_classes",
"cpp/tinyxml"
],
"configurations": {
"Release": {
"cflags": [
"-Wno-ignored-qualifiers",
"-Wno-tautological-undefined-compare",
"-Wno-unknown-pragmas"
],
"xcode_settings": {
"OTHER_CFLAGS": [
"-Wno-ignored-qualifiers",
"-Wno-tautological-undefined-compare",
"-Wno-unknown-pragmas"
]
}
}
},
"conditions": [
['OS=="linux"', {
"sources": [
"cpp/hidapi/linux/hid.c"
]
}],
['OS=="mac"', {
"sources": [
"cpp/hidapi/mac/hid.c"
],
"defines": [
"DARWIN"
]
}]
]
}
]
}
| 38.324503
| 70
| 0.609642
| 635
| 5,787
| 5.44252
| 0.223622
| 0.173611
| 0.236979
| 0.300926
| 0.497106
| 0.042824
| 0.042824
| 0.042824
| 0.042824
| 0.042824
| 0
| 0.000454
| 0.238293
| 5,787
| 150
| 71
| 38.58
| 0.783575
| 0
| 0
| 0.086667
| 0
| 0
| 0.685675
| 0.599447
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
725f56236e16d0238e07ba4dc3098ded78592290
| 126
|
py
|
Python
|
hikyuu/admin/dialog/__init__.py
|
wondertrader/hikyuu
|
b58112c70e50538eb96448acb8a7bd7d98a39a96
|
[
"MIT"
] | 2
|
2021-09-16T06:09:03.000Z
|
2021-12-22T06:44:53.000Z
|
hikyuu/admin/dialog/__init__.py
|
dasuren/hikyuu
|
d1a1a43c10653d17ac91446e4499e6cfbfdbce12
|
[
"MIT"
] | null | null | null |
hikyuu/admin/dialog/__init__.py
|
dasuren/hikyuu
|
d1a1a43c10653d17ac91446e4499e6cfbfdbce12
|
[
"MIT"
] | 1
|
2021-12-19T23:52:13.000Z
|
2021-12-19T23:52:13.000Z
|
# -*- coding: utf-8 -*-
from .HkuEditSessionDialog import HkuEditSessionDialog
from .HkuAddUserDialog import HkuAddUserDialog
| 31.5
| 54
| 0.809524
| 11
| 126
| 9.272727
| 0.636364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00885
| 0.103175
| 126
| 4
| 55
| 31.5
| 0.893805
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7261a8f382a953116cb67c801b1af7cbcfbb7c6b
| 201
|
py
|
Python
|
mnistetude/actfunc/sigmoid.py
|
aram-father/mnist-etude
|
3f9196a7ae3447c3412507896c3dc3ae81f9d7d0
|
[
"BSD-2-Clause"
] | null | null | null |
mnistetude/actfunc/sigmoid.py
|
aram-father/mnist-etude
|
3f9196a7ae3447c3412507896c3dc3ae81f9d7d0
|
[
"BSD-2-Clause"
] | 1
|
2022-03-18T03:14:13.000Z
|
2022-03-18T03:14:13.000Z
|
mnistetude/actfunc/sigmoid.py
|
aram-father/mnist-etude
|
3f9196a7ae3447c3412507896c3dc3ae81f9d7d0
|
[
"BSD-2-Clause"
] | null | null | null |
import numpy as np
from . import iactfunc
class Sigmoid(iactfunc.IActFunc):
def __init__(self):
pass
def activate(self, x: np.array) -> np.array:
return 1 / (1 + np.exp(-x))
| 18.272727
| 48
| 0.616915
| 29
| 201
| 4.137931
| 0.62069
| 0.116667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013423
| 0.258706
| 201
| 10
| 49
| 20.1
| 0.791946
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0.142857
| 0.285714
| 0.142857
| 0.857143
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 5
|
726909f7bc9ab5d8c8fbf53d0947fe9506d1a01f
| 172
|
py
|
Python
|
Python/Tests/TestData/DebuggerProject/LocalClosureVarsTestImported.py
|
techkey/PTVS
|
8355e67eedd8e915ca49bd38a2f36172696fd903
|
[
"Apache-2.0"
] | 404
|
2019-05-07T02:21:57.000Z
|
2022-03-31T17:03:04.000Z
|
Python/Tests/TestData/DebuggerProject/LocalClosureVarsTestImported.py
|
techkey/PTVS
|
8355e67eedd8e915ca49bd38a2f36172696fd903
|
[
"Apache-2.0"
] | 1,672
|
2019-05-06T21:09:38.000Z
|
2022-03-31T23:16:04.000Z
|
Python/Tests/TestData/DebuggerProject/LocalClosureVarsTestImported.py
|
techkey/PTVS
|
8355e67eedd8e915ca49bd38a2f36172696fd903
|
[
"Apache-2.0"
] | 186
|
2019-05-13T03:17:37.000Z
|
2022-03-31T16:24:05.000Z
|
class class1(object):
"""description of class"""
def outer(self, x):
y = x
def inner(z):
return x+y+z
return inner
class1().outer(1)(2)
| 17.2
| 30
| 0.534884
| 25
| 172
| 3.68
| 0.6
| 0.043478
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033898
| 0.313953
| 172
| 9
| 31
| 19.111111
| 0.745763
| 0.116279
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0
| 0.142857
| 0.714286
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
72946d2d5b03567b3c9692387e47fd43272d5230
| 176
|
py
|
Python
|
0x08-python-more_classes/main/main_7.py
|
johncoleman83/bootcampschool-higher_level_programming
|
a83c3b7092cfe893c87e495f8d8eec9228c9b808
|
[
"MIT"
] | null | null | null |
0x08-python-more_classes/main/main_7.py
|
johncoleman83/bootcampschool-higher_level_programming
|
a83c3b7092cfe893c87e495f8d8eec9228c9b808
|
[
"MIT"
] | null | null | null |
0x08-python-more_classes/main/main_7.py
|
johncoleman83/bootcampschool-higher_level_programming
|
a83c3b7092cfe893c87e495f8d8eec9228c9b808
|
[
"MIT"
] | 1
|
2020-09-25T17:54:36.000Z
|
2020-09-25T17:54:36.000Z
|
#!/usr/bin/python3
Rectangle = __import__('7-rectangle').Rectangle
my_rectangle_1 = Rectangle(8, 4)
my_rectangle_1.print_symbol = ["Bootcamp", "School"]
print(my_rectangle_1)
| 25.142857
| 52
| 0.767045
| 25
| 176
| 4.96
| 0.56
| 0.266129
| 0.290323
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04321
| 0.079545
| 176
| 6
| 53
| 29.333333
| 0.722222
| 0.096591
| 0
| 0
| 0
| 0
| 0.158228
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
72a9be4a20cc10e8ac8621200a4dfbb37514dd13
| 13,577
|
py
|
Python
|
src/python/tests/core/base/external_users_test.py
|
ABHIsHEk122811/clusterfuzz
|
7cac0ee869787e6f547a4b3dac18196c60f03383
|
[
"Apache-2.0"
] | 4
|
2019-11-26T01:50:51.000Z
|
2021-08-14T20:32:43.000Z
|
src/python/tests/core/base/external_users_test.py
|
ABHIsHEk122811/clusterfuzz
|
7cac0ee869787e6f547a4b3dac18196c60f03383
|
[
"Apache-2.0"
] | 22
|
2019-12-26T17:02:34.000Z
|
2022-03-21T22:16:52.000Z
|
src/python/tests/core/base/external_users_test.py
|
ABHIsHEk122811/clusterfuzz
|
7cac0ee869787e6f547a4b3dac18196c60f03383
|
[
"Apache-2.0"
] | 2
|
2019-02-09T09:09:20.000Z
|
2019-02-15T05:25:13.000Z
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for external_users."""
import datetime
import unittest
from base import external_users
from datastore import data_types
from tests.test_libs import helpers
from tests.test_libs import test_utils
@test_utils.with_cloud_emulators('datastore')
class ExternalUsersTest(unittest.TestCase):
  """External users test.

  setUp seeds the datastore emulator with ExternalUserPermission entities
  covering exact-match and prefix-match FUZZER/JOB permissions across all
  three AutoCCType values, plus the fuzzers, jobs and fuzz targets they
  refer to.  Each test then exercises one lookup helper from
  base.external_users against that fixture.  Several tests pass mixed-case
  email addresses (e.g. 'User@example.com') to exercise case-insensitive
  email matching.
  """
  def setUp(self):
    helpers.patch_environ(self)
    # Fake permissions.
    # user: exact match on fuzzer 'fuzzer', CCed on everything.
    data_types.ExternalUserPermission(
        email='user@example.com',
        entity_name='fuzzer',
        entity_kind=data_types.PermissionEntityKind.FUZZER,
        auto_cc=data_types.AutoCCType.ALL).put()
    # user2: prefix 'fuzz' (matches 'fuzzer'), CCed on security issues only.
    data_types.ExternalUserPermission(
        email='user2@example.com',
        entity_name='fuzz',
        entity_kind=data_types.PermissionEntityKind.FUZZER,
        is_prefix=True,
        auto_cc=data_types.AutoCCType.SECURITY).put()
    # user3: prefix 'parent_' (matches the child fuzz targets, not 'parent').
    data_types.ExternalUserPermission(
        email='user3@example.com',
        entity_name='parent_',
        entity_kind=data_types.PermissionEntityKind.FUZZER,
        is_prefix=True,
        auto_cc=data_types.AutoCCType.NONE).put()
    # user4: exact match on the parent fuzzer only.
    data_types.ExternalUserPermission(
        email='user4@example.com',
        entity_name='parent',
        entity_kind=data_types.PermissionEntityKind.FUZZER,
        auto_cc=data_types.AutoCCType.NONE).put()
    # user5: prefix 'parent_cg' — matches nothing in the fixture.
    data_types.ExternalUserPermission(
        email='user5@example.com',
        entity_name='parent_cg',
        entity_kind=data_types.PermissionEntityKind.FUZZER,
        is_prefix=True,
        auto_cc=data_types.AutoCCType.NONE).put()
    # user6: prefix 'parens' — matches nothing in the fixture.
    data_types.ExternalUserPermission(
        email='user6@example.com',
        entity_name='parens',
        entity_kind=data_types.PermissionEntityKind.FUZZER,
        is_prefix=True,
        auto_cc=data_types.AutoCCType.NONE).put()
    # user7: prefix 'parent' (matches parent and its children), CC all.
    data_types.ExternalUserPermission(
        email='user7@example.com',
        entity_name='parent',
        entity_kind=data_types.PermissionEntityKind.FUZZER,
        is_prefix=True,
        auto_cc=data_types.AutoCCType.ALL).put()
    # Job-scoped permissions below.
    data_types.ExternalUserPermission(
        email='user@example.com',
        entity_name='job',
        entity_kind=data_types.PermissionEntityKind.JOB,
        is_prefix=False,
        auto_cc=data_types.AutoCCType.ALL).put()
    data_types.ExternalUserPermission(
        email='user2@example.com',
        entity_name='job',
        entity_kind=data_types.PermissionEntityKind.JOB,
        is_prefix=True,
        auto_cc=data_types.AutoCCType.ALL).put()
    data_types.ExternalUserPermission(
        email='user3@example.com',
        entity_name='job2',
        entity_kind=data_types.PermissionEntityKind.JOB,
        is_prefix=False,
        auto_cc=data_types.AutoCCType.ALL).put()
    data_types.ExternalUserPermission(
        email='user8@example.com',
        entity_name='job',
        entity_kind=data_types.PermissionEntityKind.JOB,
        is_prefix=False,
        auto_cc=data_types.AutoCCType.NONE).put()
    data_types.ExternalUserPermission(
        email='user9@example.com',
        entity_name='job2',
        entity_kind=data_types.PermissionEntityKind.JOB,
        is_prefix=False,
        auto_cc=data_types.AutoCCType.NONE).put()
    data_types.ExternalUserPermission(
        email='user10@example.com',
        entity_name='job',
        entity_kind=data_types.PermissionEntityKind.JOB,
        is_prefix=False,
        auto_cc=data_types.AutoCCType.NONE).put()
    data_types.ExternalUserPermission(
        email='user10@example.com',
        entity_name='job3',
        entity_kind=data_types.PermissionEntityKind.JOB,
        is_prefix=False,
        auto_cc=data_types.AutoCCType.NONE).put()
    # Fake fuzzers.
    data_types.Fuzzer(name='fuzzer').put()
    data_types.Fuzzer(name='parent', jobs=['job', 'job2', 'job3']).put()
    data_types.Job(name='job').put()
    data_types.Job(name='job2').put()
    data_types.Job(name='job3').put()
    data_types.FuzzTarget(
        engine='parent', binary='child', project='test-project').put()
    data_types.FuzzTargetJob(
        fuzz_target_name='parent_child',
        job='job',
        last_run=datetime.datetime.utcnow()).put()
    data_types.FuzzTarget(
        engine='parent', binary='child2', project='test-project').put()
    data_types.FuzzTargetJob(
        fuzz_target_name='parent_child2',
        job='job',
        last_run=datetime.datetime.utcnow()).put()
    # NOTE(review): this FuzzTarget is identical to the parent_child one put
    # above, so the second put overwrites the first; the extra FuzzTargetJob
    # pairs parent_child with 'job3' as well — presumably intentional.
    data_types.FuzzTarget(
        engine='parent', binary='child', project='test-project').put()
    data_types.FuzzTargetJob(
        fuzz_target_name='parent_child',
        job='job3',
        last_run=datetime.datetime.utcnow()).put()
  def test_allowed_fuzzers(self):
    """allowed_fuzzers_for_user tests."""
    # Direct match.
    result = external_users.allowed_fuzzers_for_user('User@example.com')
    self.assertEqual(result, ['fuzzer'])
    # Prefix on fuzzer name.
    result = external_users.allowed_fuzzers_for_user('uSer2@example.com')
    self.assertEqual(result, ['fuzzer'])
    # Prefix on child fuzzer name.
    result = external_users.allowed_fuzzers_for_user('user3@example.com')
    self.assertEqual(result, ['parent_child', 'parent_child2'])
    # Direct match on a parent fuzzer that has children. Should not have any
    # results.
    result = external_users.allowed_fuzzers_for_user('user4@example.com')
    self.assertEqual(len(result), 0)
    # No such user.
    result = external_users.allowed_fuzzers_for_user('notexist@example.com')
    self.assertEqual(result, [])
  def test_allowed_users_for_fuzzer(self):
    """allowed_users_for_fuzzer tests."""
    # Direct match + a prefix match.
    result = external_users.allowed_users_for_fuzzer('fuzzer')
    self.assertEqual(result, ['user2@example.com', 'user@example.com'])
    # Child fuzzer prefix match.
    result = external_users.allowed_users_for_fuzzer('parent_child')
    self.assertEqual(result, ['user3@example.com', 'user7@example.com'])
  def test_is_fuzzer_allowed_for_user(self):
    """is_fuzzer_allowed_for_user tests."""
    self.assertTrue(
        external_users.is_fuzzer_allowed_for_user('uSer@example.com', 'fuzzer'))
    self.assertTrue(
        external_users.is_fuzzer_allowed_for_user('useR2@example.com',
                                                  'fuzzer'))
    self.assertFalse(
        external_users.is_fuzzer_allowed_for_user('user3@example.com',
                                                  'fuzzer'))
    self.assertFalse(
        external_users.is_fuzzer_allowed_for_user('user4@example.com',
                                                  'fuzzer'))
    self.assertFalse(
        external_users.is_fuzzer_allowed_for_user('user5@example.com',
                                                  'fuzzer'))
    self.assertFalse(
        external_users.is_fuzzer_allowed_for_user('user6@example.com',
                                                  'fuzzer'))
    # No such user.
    self.assertFalse(
        external_users.is_fuzzer_allowed_for_user('notexist@example.com',
                                                  'fuzzer'))
    self.assertFalse(
        external_users.is_fuzzer_allowed_for_user('user1@example.com',
                                                  'parent_child'))
    self.assertFalse(
        external_users.is_fuzzer_allowed_for_user('user2@example.com',
                                                  'parent_child'))
    self.assertTrue(
        external_users.is_fuzzer_allowed_for_user('user3@example.com',
                                                  'parent_child'))
    self.assertFalse(
        external_users.is_fuzzer_allowed_for_user('user4@example.com',
                                                  'parent_child'))
    self.assertFalse(
        external_users.is_fuzzer_allowed_for_user('user5@example.com',
                                                  'parent_child'))
    self.assertFalse(
        external_users.is_fuzzer_allowed_for_user('user6@example.com',
                                                  'parent_child'))
    # No such user.
    self.assertFalse(
        external_users.is_fuzzer_allowed_for_user('notexist@example.com',
                                                  'fuzzer'))
  def test_is_fuzzer_allowed_for_user_including_jobs(self):
    """is_fuzzer_allowed_for_user tests with include_from_jobs == True."""
    self.assertTrue(
        external_users.is_fuzzer_allowed_for_user(
            'User@example.com', 'parent_child', include_from_jobs=True))
    self.assertFalse(
        external_users.is_fuzzer_allowed_for_user(
            'uSer@example.com', 'parent_child', include_from_jobs=False))
    self.assertTrue(
        external_users.is_fuzzer_allowed_for_user(
            'uSer@example.com', 'parent_child2', include_from_jobs=True))
    self.assertFalse(
        external_users.is_fuzzer_allowed_for_user(
            'user@example.com', 'parent_child2', include_from_jobs=False))
    self.assertFalse(
        external_users.is_fuzzer_allowed_for_user(
            'user@example.com', 'parent_child3', include_from_jobs=True))
    self.assertFalse(
        external_users.is_fuzzer_allowed_for_user(
            'user@example.com', 'parent', include_from_jobs=True))
    self.assertTrue(
        external_users.is_fuzzer_allowed_for_user(
            'user@example.com', 'fuzzer', include_from_jobs=True))
  def test_is_job_allowed_for_user(self):
    """is_job_allowed_for_user tests."""
    self.assertTrue(
        external_users.is_job_allowed_for_user('useR@example.com', 'job'))
    self.assertTrue(
        external_users.is_job_allowed_for_user('user2@example.com', 'job'))
    self.assertTrue(
        external_users.is_job_allowed_for_user('user8@example.com', 'job'))
    self.assertFalse(
        external_users.is_job_allowed_for_user('user3@example.com', 'job'))
    self.assertTrue(
        external_users.is_job_allowed_for_user('User3@example.com', 'job2'))
    self.assertTrue(
        external_users.is_job_allowed_for_user('user9@example.com', 'job2'))
    self.assertFalse(
        external_users.is_job_allowed_for_user('user@example.com', 'job2'))
  def test_cc_users_for_fuzzer(self):
    """cc_users_for_fuzzer tests."""
    result = external_users.cc_users_for_fuzzer('fuzzer', security_flag=False)
    self.assertEqual(result, ['user@example.com'])
    result = external_users.cc_users_for_fuzzer('fuzzer', security_flag=True)
    self.assertEqual(result, ['user2@example.com', 'user@example.com'])
    result = external_users.cc_users_for_fuzzer(
        'parent_child', security_flag=True)
    self.assertListEqual(result, ['user7@example.com'])
  def test_cc_users_for_job(self):
    """cc_users_for_job tests."""
    result = external_users.cc_users_for_job('job', security_flag=False)
    self.assertEqual(result, ['user2@example.com', 'user@example.com'])
    result = external_users.cc_users_for_job('job', security_flag=True)
    self.assertEqual(result, ['user2@example.com', 'user@example.com'])
    result = external_users.cc_users_for_job('job2', security_flag=False)
    self.assertEqual(result, ['user2@example.com', 'user3@example.com'])
  def test_allowed_jobs(self):
    """Test allowed_jobs_for_user."""
    result = external_users.allowed_jobs_for_user('User@example.com')
    self.assertEqual(['job'], result)
    result = external_users.allowed_jobs_for_user('uSer2@example.com')
    self.assertEqual(['job', 'job2', 'job3'], result)
    result = external_users.allowed_jobs_for_user('user3@example.com')
    self.assertEqual(['job2'], result)
    result = external_users.allowed_jobs_for_user('user4@example.com')
    self.assertEqual([], result)
  def test_allowed_fuzzers_including_jobs(self):
    """Tests allowed_fuzzers_for_user with jobs."""
    result = external_users.allowed_fuzzers_for_user(
        'User@example.com', include_from_jobs=True)
    self.assertEqual(['fuzzer', 'parent_child', 'parent_child2'], result)
    result = external_users.allowed_fuzzers_for_user(
        'User8@example.com', include_from_jobs=True)
    self.assertEqual(['parent_child', 'parent_child2'], result)
    result = external_users.allowed_fuzzers_for_user(
        'user8@example.com', include_from_jobs=True, include_parents=True)
    self.assertEqual(['parent', 'parent_child', 'parent_child2'], result)
    result = external_users.allowed_fuzzers_for_user(
        'user9@example.com', include_from_jobs=True)
    self.assertEqual([], result)
    result = external_users.allowed_fuzzers_for_user('user10@example.com')
    self.assertEqual([], result)
    result = external_users.allowed_fuzzers_for_user(
        'user10@example.com', include_from_jobs=True)
    self.assertEqual(['parent_child', 'parent_child2'], result)
    result = external_users.allowed_fuzzers_for_user(
        'user10@example.com', include_from_jobs=True, include_parents=True)
    self.assertEqual(['parent', 'parent_child', 'parent_child2'], result)
# Allow running this test file directly (outside the test runner).
if __name__ == '__main__':
  unittest.main()
| 38.680912
| 80
| 0.67688
| 1,613
| 13,577
| 5.385617
| 0.101054
| 0.082882
| 0.054795
| 0.051802
| 0.828594
| 0.790376
| 0.783124
| 0.738575
| 0.696903
| 0.635893
| 0
| 0.008286
| 0.208883
| 13,577
| 350
| 81
| 38.791429
| 0.800484
| 0.087575
| 0
| 0.626459
| 0
| 0
| 0.15624
| 0
| 0
| 0
| 0
| 0
| 0.202335
| 1
| 0.038911
| false
| 0
| 0.023346
| 0
| 0.066148
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
72ad318242d72fca90b5c6df82e25fbedbfa4912
| 23,626
|
py
|
Python
|
buc/test.py
|
sursum/buckanjaren
|
945454add4169fd7eef3ee372f3672de2f623ce6
|
[
"MIT"
] | null | null | null |
buc/test.py
|
sursum/buckanjaren
|
945454add4169fd7eef3ee372f3672de2f623ce6
|
[
"MIT"
] | 6
|
2021-02-08T20:20:47.000Z
|
2022-03-11T23:19:29.000Z
|
test.py
|
sursum/buckanjaren
|
945454add4169fd7eef3ee372f3672de2f623ce6
|
[
"MIT"
] | null | null | null |
from django.core.urlresolvers import reverse
from django.test import TestCase, LiveServerTestCase, Client
from django.utils import timezone
from buc.models import Article, Category, Tag
from django.contrib.flatpages.models import FlatPage
from django.contrib.sites.models import Site
from django.contrib.auth.models import User
import factory.django
import markdown
#import markdown2 as markdown
#import feedparser
class SiteFactory(factory.django.DjangoModelFactory):
    """Factory for the default ``example.com`` Site record."""

    class Meta:
        model = Site
        django_get_or_create = ('name', 'domain')

    name = 'example.com'
    domain = 'example.com'
class CategoryFactory(factory.django.DjangoModelFactory):
    """Factory for the default 'python' Category record."""

    class Meta:
        model = Category
        django_get_or_create = ('name', 'description', 'slug')

    name = 'python'
    description = 'The Python programming language'
    slug = 'python'
class TagFactory(factory.django.DjangoModelFactory):
    """Factory for the default 'python' Tag record."""

    class Meta:
        model = Tag
        django_get_or_create = ('name', 'description', 'slug')

    name = 'python'
    description = 'The Python programming language'
    slug = 'python'
class AuthorFactory(factory.django.DjangoModelFactory):
    """Factory for the default 'testuser' User record."""

    class Meta:
        model = User
        django_get_or_create = (
            'username',
            'email',
            'password',
        )

    username = 'testuser'
    email = 'user@example.com'
    password = 'testuser'
class FlatPageFactory(factory.django.DjangoModelFactory):
    """Factory for the default '/about/' FlatPage record."""

    class Meta:
        model = FlatPage
        django_get_or_create = ('url', 'title', 'content')

    url = '/about/'
    title = 'About me'
    content = 'All about me'
class ArticleFactory(factory.django.DjangoModelFactory):
    """Factory for a default Article with fixed title/text/slug.

    NOTE(review): ``published_date = timezone.now()`` is evaluated once at
    class-definition (import) time, so every article built by this factory
    shares the same timestamp — and since ``published_date`` is part of
    ``django_get_or_create``, repeated calls return the same row.  Confirm
    this is intended; ``factory.LazyFunction(timezone.now)`` would give a
    per-instance timestamp instead.
    """
    class Meta:
        model = Article
        django_get_or_create = (
            'title',
            'text',
            'slug',
            'published_date'
        )
    title = 'My first article'
    text = 'This is my first blog article'
    slug = 'my-first-article'
    # Shared timestamp — see class docstring note.
    published_date = timezone.now()
    site = factory.SubFactory(SiteFactory)
    category = factory.SubFactory(CategoryFactory)
###########################################################################
# Create your tests here.
###########################################################################
class ArticleTest(TestCase):
    """Unit tests for the Article model."""

    def test_create_article(self):
        """Saving an Article persists it and all fields round-trip intact."""
        # Create the category
        category = Category()
        category.name = 'python'
        category.description = 'The Python programming language'
        category.save()

        # Create the author
        author = User.objects.create_user('testuser', 'user@example.com', 'password')
        author.save()

        # Create the site
        ''' site = Site()
        site.name = 'example.com'
        site.domain = 'example.com'
        site.save() '''

        # Create the article
        article = Article()

        # Set the attributes
        article.title = 'My first article'
        article.text = 'This is my first article'
        article.published_date = timezone.now()
        ## article.created_date = timezone.now() Set by default
        article.comment = 'This is a comment'
        ## article.cpyrght = 'buckanjaren' Set by default

        ## Left to do
        '''
        article.editor = models.ForeignKey(User,blank=True,null=True)
        article.creator = models.ForeignKey(Profile,blank=True,null=True)
        article.commentator = models.ForeignKey(Profile,blank=True,null=True)
        '''

        # Save it
        article.save()

        # Check we can find it.
        # Note: assertEquals is a deprecated alias of assertEqual (removed in
        # Python 3.12); use the canonical name throughout.
        all_articles = Article.objects.all()
        self.assertEqual(len(all_articles), 1)
        only_article = all_articles[0]
        self.assertEqual(only_article, article)

        # Check attributes
        self.assertEqual(only_article.title, 'My first article')
        self.assertEqual(only_article.text, 'This is my first article')
        self.assertEqual(only_article.published_date.day, article.published_date.day)
        self.assertEqual(only_article.published_date.month, article.published_date.month)
        self.assertEqual(only_article.published_date.year, article.published_date.year)
        self.assertEqual(only_article.published_date.hour, article.published_date.hour)
        self.assertEqual(only_article.published_date.minute, article.published_date.minute)
        self.assertEqual(only_article.published_date.second, article.published_date.second)
        self.assertEqual(only_article.created_date.day, article.created_date.day)
        self.assertEqual(only_article.created_date.month, article.created_date.month)
        self.assertEqual(only_article.created_date.year, article.created_date.year)
        self.assertEqual(only_article.created_date.hour, article.created_date.hour)
        self.assertEqual(only_article.created_date.minute, article.created_date.minute)
        self.assertEqual(only_article.comment, 'This is a comment')
        self.assertEqual(only_article.cpyrght, 'buckanjaren')
###########################################################################
###########################################################################
class BaseAcceptanceTest(LiveServerTestCase):
    """Base class for acceptance tests; provides a fresh test Client."""
    def setUp(self):
        # A new Client per test keeps session/cookie state isolated.
        self.client = Client()
class AdminTest(BaseAcceptanceTest):
    """Acceptance tests for admin CRUD flows (category, tag, article).

    NOTE(review): the 'bobsmith'/'testuser' credentials presumably come from
    the users.json fixture — verify against that fixture.
    """
    # Loads the admin user account used by every test below.
    fixtures = ['users.json']
    def test_login(self):
        """Logging in switches the admin page from 'Log in' to 'Log out'."""
        # Get login page
        response = self.client.get('/admin/', follow=True)
        # Check response code
        self.assertEqual(response.status_code, 200)
        # Check 'Log in' in response
        self.assertTrue('Log in' in response.content.decode('utf-8'))
        # Log the user in
        self.client.login(username='bobsmith', password="testuser")
        # Check response code
        response = self.client.get('/admin/')
        self.assertEqual(response.status_code, 200)
        # Check 'Log out' in response
        self.assertTrue('Log out' in response.content.decode('utf-8'))
    def test_logout(self):
        """Logging out switches the admin page back to 'Log in'."""
        # Log in
        self.client.login(username='bobsmith', password="testuser")
        # Check response code
        response = self.client.get('/admin/')
        self.assertEqual(response.status_code, 200)
        # Check 'Log out' in response
        self.assertTrue('Log out' in response.content.decode('utf-8'))
        # Log out
        self.client.logout()
        # Check response code
        response = self.client.get('/admin/', follow=True)
        self.assertEqual(response.status_code, 200)
        # Check 'Log in' in response
        self.assertTrue('Log in' in response.content.decode('utf-8'))
    def test_create_category(self):
        """A category can be created through the admin add form."""
        # Log in
        self.client.login(username='bobsmith', password="testuser")
        # Check response code
        response = self.client.get('/admin/buc/category/add/')
        self.assertEqual(response.status_code, 200)
        # Create the new category
        response = self.client.post('/admin/buc/category/add/', {
            'name': 'python',
            'description': 'The Python programming language'
        },
        follow=True
        )
        self.assertEqual(response.status_code, 200)
        # Check added successfully
        self.assertTrue('added successfully' in response.content.decode('utf-8'))
        # Check new category now in database
        all_categories = Category.objects.all()
        self.assertEqual(len(all_categories), 1)
    def test_edit_category(self):
        """An existing category can be edited through the admin change form."""
        # Create the category
        category = CategoryFactory()
        # Log in
        self.client.login(username='bobsmith', password="testuser")
        # Edit the category
        response = self.client.post('/admin/buc/category/' + str(category.pk) + '/change/', {
            'name': 'perl',
            'description': 'The Perl programming language'
        }, follow=True)
        self.assertEqual(response.status_code, 200)
        # Check changed successfully
        self.assertTrue('changed successfully' in response.content.decode('utf-8'))
        # Check category amended
        all_categories = Category.objects.all()
        self.assertEqual(len(all_categories), 1)
        only_category = all_categories[0]
        self.assertEqual(only_category.name, 'perl')
        self.assertEqual(only_category.description, 'The Perl programming language')
    def test_delete_category(self):
        """An existing category can be deleted through the admin."""
        # Create the category
        category = CategoryFactory()
        # Log in
        self.client.login(username='bobsmith', password="testuser")
        # Delete the category
        response = self.client.post('/admin/buc/category/' + str(category.pk) + '/delete/', {
            'article': 'yes'
        }, follow=True)
        self.assertEqual(response.status_code, 200)
        # Check deleted successfully
        self.assertTrue('deleted successfully' in response.content.decode('utf-8'))
        # Check category deleted
        all_categories = Category.objects.all()
        self.assertEqual(len(all_categories), 0)
    def test_create_tag(self):
        """A tag can be created through the admin add form."""
        # Log in
        self.client.login(username='bobsmith', password="testuser")
        # Check response code
        response = self.client.get('/admin/buc/tag/add/')
        self.assertEqual(response.status_code, 200)
        # Create the new tag
        response = self.client.post('/admin/buc/tag/add/', {
            'name': 'python',
            'description': 'The Python programming language'
        },
        follow=True
        )
        self.assertEqual(response.status_code, 200)
        # Check added successfully
        self.assertTrue('added successfully' in response.content.decode('utf-8'))
        # Check new tag now in database
        all_tags = Tag.objects.all()
        self.assertEqual(len(all_tags), 1)
    def test_edit_tag(self):
        """An existing tag can be edited through the admin change form."""
        # Create the tag
        tag = TagFactory()
        # Log in
        self.client.login(username='bobsmith', password="testuser")
        # Edit the tag
        response = self.client.post('/admin/buc/tag/' + str(tag.pk) + '/change/', {
            'name': 'perl',
            'description': 'The Perl programming language'
        }, follow=True)
        self.assertEqual(response.status_code, 200)
        # Check changed successfully
        self.assertTrue('changed successfully' in response.content.decode('utf-8'))
        # Check tag amended
        all_tags = Tag.objects.all()
        self.assertEqual(len(all_tags), 1)
        only_tag = all_tags[0]
        self.assertEqual(only_tag.name, 'perl')
        self.assertEqual(only_tag.description, 'The Perl programming language')
    def test_delete_tag(self):
        """An existing tag can be deleted through the admin."""
        # Create the tag
        tag = TagFactory()
        # Log in
        self.client.login(username='bobsmith', password="testuser")
        # Delete the tag
        response = self.client.post('/admin/buc/tag/' + str(tag.pk) + '/delete/', {
            'article': 'yes'
        }, follow=True)
        self.assertEqual(response.status_code, 200)
        # Check deleted successfully
        self.assertTrue('deleted successfully' in response.content.decode('utf-8'))
        # Check tag deleted
        all_tags = Tag.objects.all()
        self.assertEqual(len(all_tags), 0)
    def test_create_article(self):
        """An article (with tag) can be created through the admin add form."""
        # Create the category
        category = CategoryFactory()
        # Create the tag
        tag = TagFactory()
        # Log in
        self.client.login(username='bobsmith', password="testuser")
        # Check response code
        response = self.client.get('/admin/buc/article/add/')
        self.assertEqual(response.status_code, 200)
        # Create the new article
        response = self.client.post('/admin/buc/article/add/', {
            'title': 'My first article',
            'text': 'This is my first article',
            'published_date_0': '2013-12-28',
            'published_date_1': '22:00:04',
            'slug': 'my-first-article',
            'site': '1',
            'category': str(category.pk),
            'tags': str(tag.pk)
        },
        follow=True
        )
        self.assertEqual(response.status_code, 200)
        # Check added successfully
        self.assertTrue('added successfully' in response.content.decode('utf-8'))
        # Check new article now in database
        all_articles = Article.objects.all()
        self.assertEqual(len(all_articles), 1)
    def test_create_article_without_tag(self):
        """An article can be created with no tags attached."""
        # Create the category
        category = CategoryFactory()
        # Log in
        self.client.login(username='bobsmith', password="testuser")
        # Check response code
        response = self.client.get('/admin/buc/article/add/')
        self.assertEqual(response.status_code, 200)
        # Create the new article
        response = self.client.post('/admin/buc/article/add/', {
            'title': 'My first article',
            'text': 'This is my first article',
            'published_date_0': '2013-12-28',
            'published_date_1': '22:00:04',
            'slug': 'my-first-article',
            'site': '1',
            'category': str(category.pk)
        },
        follow=True
        )
        self.assertEqual(response.status_code, 200)
        # Check added successfully
        self.assertTrue('added successfully' in response.content.decode('utf-8'))
        # Check new article now in database
        all_articles = Article.objects.all()
        self.assertEqual(len(all_articles), 1)
    def test_edit_article(self):
        """An existing article can be edited through the admin change form."""
        # Create the article
        article = ArticleFactory()
        # Create the category
        category = CategoryFactory()
        # Create the tag
        tag = TagFactory()
        article.tags.add(tag)
        # Log in
        self.client.login(username='bobsmith', password="testuser")
        # Edit the article
        response = self.client.post('/admin/buc/article/' + str(article.pk) + '/change/', {
            'title': 'My second article',
            'text': 'This is my second blog article',
            'published_date_0': '2013-12-28',
            'published_date_1': '22:00:04',
            'slug': 'my-second-article',
            'site': '1',
            'category': str(category.pk),
            'tags': str(tag.pk)
        },
        follow=True
        )
        self.assertEqual(response.status_code, 200)
        # Check changed successfully
        self.assertTrue('changed successfully' in response.content.decode('utf-8'))
        # Check article amended
        all_articles = Article.objects.all()
        self.assertEqual(len(all_articles), 1)
        only_article = all_articles[0]
        self.assertEqual(only_article.title, 'My second article')
        self.assertEqual(only_article.text, 'This is my second blog article')
    def test_delete_article(self):
        """An existing article can be deleted through the admin."""
        # Create the article
        article = ArticleFactory()
        # Create the tag
        tag = TagFactory()
        article.tags.add(tag)
        # Check new article saved
        all_articles = Article.objects.all()
        self.assertEqual(len(all_articles), 1)
        # Log in
        self.client.login(username='bobsmith', password="testuser")
        # Delete the article
        response = self.client.post('/admin/buc/article/' + str(article.pk) + '/delete/', {
            'article': 'yes'
        }, follow=True)
        self.assertEqual(response.status_code, 200)
        # Check deleted successfully
        self.assertTrue('deleted successfully' in response.content.decode('utf-8'))
        # Check article deleted
        all_articles = Article.objects.all()
        self.assertEqual(len(all_articles), 0)
###########################################################################
## Test of Article View
###########################################################################
class ArticleViewTest(BaseAcceptanceTest):
    """Acceptance tests for the public views: index, article, category, tag."""
    def setUp(self):
        # Fresh client per test (shadows BaseAcceptanceTest.setUp with the
        # same behavior).
        self.client = Client()
    def test_index(self):
        """The home view renders title, text, category, tag, date and link."""
        # Create the article
        article = ArticleFactory(text='This is [my first blog article](http://127.0.0.1:8000/)')
        # Create the tag
        tag = TagFactory(name='perl', description='The Perl programming language')
        article.tags.add(tag)
        # Check new article saved
        all_articles = Article.objects.all()
        self.assertEqual(len(all_articles), 1)
        # Fetch the index
        response = self.client.get(reverse('buc:HomeView'))
        self.assertEqual(response.status_code, 200)
        # Check the article title is in the response
        self.assertTrue(article.title in response.content.decode('utf-8'))
        # Check the article text is in the response
        self.assertTrue(markdown.markdown(article.text) in response.content.decode('utf-8'))
        # Check the article category is in the response
        self.assertTrue(article.category.name in response.content.decode('utf-8'))
        # Check the article tag is in the response
        article_tag = all_articles[0].tags.all()[0]
        #print(response.content.decode('utf-8'))
        self.assertTrue(article_tag.name in response.content.decode('utf-8'))
        # Check the article date is in the response
        self.assertTrue(str(article.published_date.year) in response.content.decode('utf-8'))
        self.assertTrue(article.published_date.strftime('%b') in response.content.decode('utf-8'))
        self.assertTrue(str(article.published_date.day) in response.content.decode('utf-8'))
        # Check the link is marked up properly
        self.assertTrue('<a href="http://127.0.0.1:8000/">my first blog article</a>' in response.content.decode('utf-8'))
        # Check the correct template was used
        self.assertTemplateUsed(response, 'buc/homeview.html')
    def test_article_page(self):
        """The article detail view renders all article attributes."""
        # Create the article
        article = ArticleFactory(text='This is [my first blog article](http://127.0.0.1:8000/)')
        # Create the tag
        tag = TagFactory(name='perl', description='The Perl programming language')
        article.tags.add(tag)
        # Check new article saved
        all_articles = Article.objects.all()
        self.assertEqual(len(all_articles), 1)
        only_article = all_articles[0]
        self.assertEqual(only_article, article)
        # Get the article URL
        article_url = only_article.get_absolute_url()
        # Fetch the article
        response = self.client.get(article_url)
        self.assertEqual(response.status_code, 200)
        # Check the article title is in the response
        self.assertTrue(article.title in response.content.decode('utf-8'))
        # Check the article category is in the response
        self.assertTrue(article.category.name in response.content.decode('utf-8'))
        # Check the article tag is in the response
        article_tag = all_articles[0].tags.all()[0]
        self.assertTrue(article_tag.name in response.content.decode('utf-8'))
        # Check the article text is in the response
        self.assertTrue(markdown.markdown(article.text) in response.content.decode('utf-8'))
        # Check the article date is in the response
        self.assertTrue(str(article.published_date.year) in response.content.decode('utf-8'))
        self.assertTrue(article.published_date.strftime('%b') in response.content.decode('utf-8'))
        self.assertTrue(str(article.published_date.day) in response.content.decode('utf-8'))
        # Check the link is marked up properly
        self.assertTrue('<a href="http://127.0.0.1:8000/">my first blog article</a>' in response.content.decode('utf-8'))
        # Check the correct template was used
        self.assertTemplateUsed(response, 'buc/article_detail.html')
    def test_category_page(self):
        """The category list view renders the category's articles."""
        # Create the article
        article = ArticleFactory(text='This is [my first blog article](http://127.0.0.1:8000/)')
        # Check new article saved
        all_articles = Article.objects.all()
        self.assertEqual(len(all_articles), 1)
        only_article = all_articles[0]
        self.assertEqual(only_article, article)
        # Get the category URL
        category_url = article.category.get_absolute_url()
        # Fetch the category
        response = self.client.get(category_url)
        self.assertEqual(response.status_code, 200)
        # Check the category name is in the response
        self.assertTrue(article.category.name in response.content.decode('utf-8'))
        # Check the article text is in the response
        self.assertTrue(markdown.markdown(article.text) in response.content.decode('utf-8'))
        # Check the article date is in the response
        self.assertTrue(str(article.published_date.year) in response.content.decode('utf-8'))
        self.assertTrue(article.published_date.strftime('%b') in response.content.decode('utf-8'))
        self.assertTrue(str(article.published_date.day) in response.content.decode('utf-8'))
        # Check the link is marked up properly
        self.assertTrue('<a href="http://127.0.0.1:8000/">my first blog article</a>' in response.content.decode('utf-8'))
        # Check the correct template was used
        self.assertTemplateUsed(response, 'buc/article_list.html')
    def test_nonexistent_category_page(self):
        """A missing category renders the empty-list page, not a 404."""
        category_url = '/category/blah/'
        response = self.client.get(category_url)
        self.assertEqual(response.status_code, 200)
        self.assertTrue('No articles found' in response.content.decode('utf-8'))
    def test_tag_page(self):
        """The tag list view renders the tag's articles."""
        # Create the author
        author = AuthorFactory()
        # Create the site
        site = SiteFactory()
        # Create the article
        article = ArticleFactory(text='This is [my first blog article](http://127.0.0.1:8000/)')
        # Create the tag
        tag = TagFactory()
        article.tags.add(tag)
        # Check new article saved
        all_articles = Article.objects.all()
        self.assertEqual(len(all_articles), 1)
        only_article = all_articles[0]
        self.assertEqual(only_article, article)
        # Get the tag URL
        tag_url = article.tags.all()[0].get_absolute_url()
        # Fetch the tag
        response = self.client.get(tag_url)
        self.assertEqual(response.status_code, 200)
        # Check the tag name is in the response
        self.assertTrue(article.tags.all()[0].name in response.content.decode('utf-8'))
        # Check the article text is in the response
        self.assertTrue(markdown.markdown(article.text) in response.content.decode('utf-8'))
        # Check the article date is in the response
        self.assertTrue(str(article.published_date.year) in response.content.decode('utf-8'))
        self.assertTrue(article.published_date.strftime('%b') in response.content.decode('utf-8'))
        self.assertTrue(str(article.published_date.day) in response.content.decode('utf-8'))
        # Check the link is marked up properly
        self.assertTrue('<a href="http://127.0.0.1:8000/">my first blog article</a>' in response.content.decode('utf-8'))
        # Check the correct template was used
        self.assertTemplateUsed(response, 'buc/article_list.html')
    def test_nonexistent_tag_page(self):
        """A missing tag renders the empty-list page, not a 404."""
        tag_url = '/tag/blah/'
        response = self.client.get(tag_url)
        self.assertEqual(response.status_code, 200)
        self.assertTrue('No articles found' in response.content.decode('utf-8'))
| 35.210134
| 121
| 0.619614
| 2,714
| 23,626
| 5.309506
| 0.072955
| 0.049965
| 0.065579
| 0.074948
| 0.823803
| 0.798681
| 0.73449
| 0.716725
| 0.690354
| 0.670437
| 0
| 0.0157
| 0.250529
| 23,626
| 671
| 122
| 35.210134
| 0.798102
| 0.130407
| 0
| 0.608579
| 0
| 0.010724
| 0.151379
| 0.010452
| 0
| 0
| 0
| 0
| 0.302949
| 1
| 0.0563
| false
| 0.040214
| 0.024129
| 0
| 0.179625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
72da91fba876a726df19b13e30c306cac4226b1b
| 88
|
py
|
Python
|
application/errors.py
|
Fizic/ConcordWeather
|
df3c04ed9befe9b160ba3d00327e5aa516452a75
|
[
"MIT"
] | null | null | null |
application/errors.py
|
Fizic/ConcordWeather
|
df3c04ed9befe9b160ba3d00327e5aa516452a75
|
[
"MIT"
] | null | null | null |
application/errors.py
|
Fizic/ConcordWeather
|
df3c04ed9befe9b160ba3d00327e5aa516452a75
|
[
"MIT"
] | null | null | null |
class CityLimitError(Exception):
    """Exception signalling that the city limit was exceeded."""
class CityDoesNotExist(Exception):
    """Application error signalling that a requested city could not be found.

    Raised by callers elsewhere in the application (raise sites not visible here).
    """
    pass
| 12.571429
| 34
| 0.75
| 8
| 88
| 8.25
| 0.625
| 0.393939
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 88
| 6
| 35
| 14.666667
| 0.916667
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
f4275080d92d60e210b7ed1cb95b15ab13e5a4ee
| 235
|
py
|
Python
|
yolox/exp/__init__.py
|
junhai0428/YOLOX-OBB
|
9f0d745b89ba6559e692ff06ba09b433b2f4594c
|
[
"Apache-2.0"
] | null | null | null |
yolox/exp/__init__.py
|
junhai0428/YOLOX-OBB
|
9f0d745b89ba6559e692ff06ba09b433b2f4594c
|
[
"Apache-2.0"
] | null | null | null |
yolox/exp/__init__.py
|
junhai0428/YOLOX-OBB
|
9f0d745b89ba6559e692ff06ba09b433b2f4594c
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
from .base_exp import BaseExp
from .build import get_exp
from .yolox_base import Exp
from .yolox_base_obb_kld import ExpOBB_KLD
| 23.5
| 58
| 0.757447
| 39
| 235
| 4.384615
| 0.692308
| 0.081871
| 0.140351
| 0.187135
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.049505
| 0.140426
| 235
| 9
| 59
| 26.111111
| 0.79703
| 0.421277
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f42b552c7870e20d64e40cc66bf156cd6c76af60
| 119
|
py
|
Python
|
qsimov/structures/qbase.py
|
Mowstyl/QSimov
|
091dc2ebb9aad1916b98ccc05d63563ef518e77c
|
[
"MIT"
] | 3
|
2021-04-29T08:59:14.000Z
|
2022-01-01T13:54:32.000Z
|
qsimov/structures/qbase.py
|
Mowstyl/QSimov
|
091dc2ebb9aad1916b98ccc05d63563ef518e77c
|
[
"MIT"
] | 2
|
2021-09-20T11:29:59.000Z
|
2022-03-29T18:27:14.000Z
|
qsimov/structures/qbase.py
|
Mowstyl/QSimov
|
091dc2ebb9aad1916b98ccc05d63563ef518e77c
|
[
"MIT"
] | 3
|
2021-04-29T08:59:17.000Z
|
2022-01-01T13:55:04.000Z
|
from abc import ABC, abstractmethod
class QBase(ABC):
    """Abstract base class for quantum structures.

    Concrete subclasses must implement get_num_qubits; instantiating QBase
    directly raises TypeError (ABC + abstractmethod).
    """

    @abstractmethod
    def get_num_qubits(self):
        """Return the number of qubits of this structure (must be overridden)."""
        pass
| 14.875
| 35
| 0.689076
| 15
| 119
| 5.333333
| 0.8
| 0.425
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.243697
| 119
| 7
| 36
| 17
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.2
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
f44938c8b199c00e74fb8e17b5ca86153b2cea56
| 284
|
py
|
Python
|
src/yellowdog_client/model/exceptions/invalid_session_exception.py
|
yellowdog/yellowdog-sdk-python-public
|
da69a7d6e45c92933e34fefcaef8b5d98dcd6036
|
[
"Apache-2.0"
] | null | null | null |
src/yellowdog_client/model/exceptions/invalid_session_exception.py
|
yellowdog/yellowdog-sdk-python-public
|
da69a7d6e45c92933e34fefcaef8b5d98dcd6036
|
[
"Apache-2.0"
] | null | null | null |
src/yellowdog_client/model/exceptions/invalid_session_exception.py
|
yellowdog/yellowdog-sdk-python-public
|
da69a7d6e45c92933e34fefcaef8b5d98dcd6036
|
[
"Apache-2.0"
] | null | null | null |
from dataclasses import dataclass, field
from .base_custom_exception import BaseCustomException
from .error_type import ErrorType
@dataclass
class InvalidSessionException(BaseCustomException):
    """Exception dataclass carrying a fixed errorType discriminator.

    errorType is pinned to ErrorType.InvalidSessionException and excluded
    from the generated __init__ (init=False), so callers cannot override it.
    """
    errorType: ErrorType = field(default=ErrorType.InvalidSessionException, init=False)
| 28.4
| 87
| 0.848592
| 28
| 284
| 8.5
| 0.607143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098592
| 284
| 9
| 88
| 31.555556
| 0.929688
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f44fab9ee339d930be8450f7578b5729db169354
| 65
|
py
|
Python
|
Google It-Cert-Automation/b-Using-Python-to-Interact-with-the-Operating-System/W1/hello_world.py
|
JAL-code/google
|
3989af94bb72b6fabde95ed50bac1493640d4cf6
|
[
"MIT"
] | null | null | null |
Google It-Cert-Automation/b-Using-Python-to-Interact-with-the-Operating-System/W1/hello_world.py
|
JAL-code/google
|
3989af94bb72b6fabde95ed50bac1493640d4cf6
|
[
"MIT"
] | null | null | null |
Google It-Cert-Automation/b-Using-Python-to-Interact-with-the-Operating-System/W1/hello_world.py
|
JAL-code/google
|
3989af94bb72b6fabde95ed50bac1493640d4cf6
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Emit the greeting repeated ten times on one line, then the classic message.
greeting = "Hello "
print(greeting * 10)
print("Hello World")
| 16.25
| 22
| 0.661538
| 10
| 65
| 4.3
| 0.8
| 0.465116
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.052632
| 0.123077
| 65
| 3
| 23
| 21.666667
| 0.701754
| 0.323077
| 0
| 0
| 0
| 0
| 0.395349
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
f462a34a43b4a8632f592136a58f145ca3c40478
| 209
|
py
|
Python
|
abdbeam/__init__.py
|
victorazzo/abdbeam
|
378d4324e26a3860b2a9e1b139f2372fe76d2a1e
|
[
"BSD-3-Clause"
] | 10
|
2019-10-04T05:41:40.000Z
|
2022-03-09T04:48:37.000Z
|
abdbeam/__init__.py
|
victorazzo/abdbeam
|
378d4324e26a3860b2a9e1b139f2372fe76d2a1e
|
[
"BSD-3-Clause"
] | 1
|
2018-12-16T03:40:22.000Z
|
2019-01-26T16:32:08.000Z
|
abdbeam/__init__.py
|
victorazzo/abdbeam
|
378d4324e26a3860b2a9e1b139f2372fe76d2a1e
|
[
"BSD-3-Clause"
] | 1
|
2021-06-30T08:05:12.000Z
|
2021-06-30T08:05:12.000Z
|
from .core import Section, Point, Segment, Load
from .materials import (Material, Isotropic, ShearConnector, PlyMaterial,
Laminate)
from .plots import plot_section, plot_section_loads
| 34.833333
| 73
| 0.722488
| 23
| 209
| 6.434783
| 0.695652
| 0.148649
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.215311
| 209
| 5
| 74
| 41.8
| 0.902439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
be306cf61aa78cbfa10253e62b5416481ad37c96
| 7,454
|
py
|
Python
|
tests/unit/test_waiter.py
|
nhsuk/ui-test-core
|
089a93a3f375993812c3c5946f18985f99b42a6f
|
[
"MIT"
] | 8
|
2019-09-16T14:31:38.000Z
|
2022-02-03T21:26:04.000Z
|
tests/unit/test_waiter.py
|
nhsuk/ui-test-core
|
089a93a3f375993812c3c5946f18985f99b42a6f
|
[
"MIT"
] | 12
|
2019-09-13T14:47:26.000Z
|
2022-01-10T11:24:52.000Z
|
tests/unit/test_waiter.py
|
nhsuk/ui-test-core
|
089a93a3f375993812c3c5946f18985f99b42a6f
|
[
"MIT"
] | 4
|
2019-09-16T14:49:53.000Z
|
2022-02-02T15:42:01.000Z
|
from unittest import mock
from unittest.mock import MagicMock
from hamcrest import calling, raises, is_not
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.by import By
from tests.unit.unit_test_utils import *
from uitestcore.page_element import PageElement
from uitestcore.waiter import Waiter
class MockBrowserIsReady:
    """Stand-in expected-condition that always reports the browser as ready."""

    def __call__(self, *args):
        # Ignore all arguments; unconditionally signal readiness.
        return True
class MockBrowserIsNotReady:
    """Stand-in expected-condition that always reports the browser as NOT ready."""

    def __call__(self, *args):
        # Ignore all arguments; unconditionally signal not-ready.
        return False
class MockVisibilityOfElementLocated:
    """Fake visibility condition: truthy only for the locator value "visible_element"."""

    def __init__(self, locator):
        # locator is a (by, value) pair; only the value part is inspected on call.
        self.locator = locator

    def __call__(self, driver):
        """Return True iff the locator's value component is "visible_element"."""
        return self.locator[1] == "visible_element"
class MockPresenceOfElementLocated(object):
    """Fake presence condition: truthy only for the locator value "element_present"."""

    def __init__(self, locator):
        # Store the (by, value) locator; the value part drives the result.
        self.locator = locator

    def __call__(self, driver):
        """Return True iff the locator's value component is "element_present"."""
        return self.locator[1] == "element_present"
class MockElementHasAttribute:
    """Fake ElementHasAttribute condition keyed purely on the expected value string."""

    def __init__(self, finder, page_element, attribute_name, expected_attribute_value):
        # Mirror the real condition's constructor; only expected_attribute_value
        # influences the call result.
        self.find = finder
        self.page_element = page_element
        self.attribute_name = attribute_name
        self.expected_attribute_value = expected_attribute_value

    def __call__(self, *args):
        """Return True iff the expected value is the sentinel "test_value_exists"."""
        return self.expected_attribute_value == "test_value_exists"
class MockAlertIsPresent:
    """Stand-in alert condition that always reports an alert as present."""

    def __call__(self, driver):
        # Unconditionally truthy regardless of the driver argument.
        return True
class MockAlertIsNotPresent:
    """Stand-in alert condition that always reports no alert present."""

    def __call__(self, driver):
        # Unconditionally falsy regardless of the driver argument.
        return False
@mock.patch("time.sleep")
@mock.patch("uitestcore.custom_expected_conditions.BrowserIsReady", side_effect=MockBrowserIsReady)
def test_for_page_to_load_ready(mock_browser_is_ready, mock_sleep):
    """for_page_to_load must not raise when BrowserIsReady reports ready."""
    # NOTE: patch decorators apply bottom-up, so BrowserIsReady's mock is the
    # first parameter and time.sleep's mock is the second.
    wait = Waiter("driver", "finder", 0, MagicMock(name="logger"))
    assert_that(calling(wait.for_page_to_load), is_not(raises(TimeoutException)),
                "Waiting for the page to load when the browser is ready should not raise an exception")
    check_mocked_functions_called(mock_sleep, mock_browser_is_ready)
@mock.patch("time.sleep")
@mock.patch("uitestcore.custom_expected_conditions.BrowserIsReady", side_effect=MockBrowserIsNotReady)
def test_for_page_to_load_not_ready(mock_browser_is_ready, mock_sleep):
    """for_page_to_load must raise TimeoutException when the browser never becomes ready."""
    # timeout of 0 makes the wait give up immediately.
    wait = Waiter("driver", "finder", 0, MagicMock(name="logger"))
    assert_that(calling(wait.for_page_to_load), raises(TimeoutException),
                "Waiting for the page to load when the browser is not ready should raise an exception")
    check_mocked_functions_called(mock_sleep, mock_browser_is_ready)
@mock.patch("uitestcore.waiter.visibility_of_element_located", side_effect=MockVisibilityOfElementLocated)
def test_for_element_to_be_visible_is_visible(mock_visibility_of_element_located):
    """for_element_to_be_visible must not raise for a locator the fake reports visible."""
    wait = Waiter("driver", "finder", 0, MagicMock(name="logger"))
    # "visible_element" is the sentinel MockVisibilityOfElementLocated treats as visible.
    page_element = PageElement(By.ID, "visible_element")
    assert_that(calling(wait.for_element_to_be_visible).with_args(page_element), is_not(raises(TimeoutException)),
                "Waiting for an element to be visible when the element is displayed should not raise an exception")
    check_mocked_functions_called(mock_visibility_of_element_located)
@mock.patch("time.sleep")
@mock.patch("uitestcore.waiter.visibility_of_element_located", side_effect=MockVisibilityOfElementLocated)
def test_for_element_to_be_visible_is_not_visible(mock_visibility_of_element_located, mock_sleep):
    """for_element_to_be_visible must raise TimeoutException for a never-visible locator."""
    wait = Waiter("driver", "finder", 0, MagicMock(name="logger"))
    # Any value other than "visible_element" makes the fake condition falsy.
    page_element = PageElement(By.ID, "not_visible_element")
    assert_that(calling(wait.for_element_to_be_visible).with_args(page_element), raises(TimeoutException),
                "Waiting for an element to be visible when the element is not displayed should raise an exception")
    check_mocked_functions_called(mock_sleep, mock_visibility_of_element_located)
@mock.patch("uitestcore.waiter.presence_of_element_located", side_effect=MockPresenceOfElementLocated)
def test_for_element_to_be_present_is_present(mock_presence_of_element_located):
    """for_element_to_be_present must not raise for a locator the fake reports present."""
    wait = Waiter("driver", "finder", 0, MagicMock(name="logger"))
    # "element_present" is the sentinel MockPresenceOfElementLocated treats as present.
    page_element = PageElement(By.ID, "element_present")
    assert_that(calling(wait.for_element_to_be_present).with_args(page_element), is_not(raises(TimeoutException)),
                "Waiting for an element to be present when the element is present should not raise an exception")
    check_mocked_functions_called(mock_presence_of_element_located)
@mock.patch("time.sleep")
@mock.patch("uitestcore.waiter.presence_of_element_located", side_effect=MockPresenceOfElementLocated)
def test_for_element_to_be_present_is_not_present(mock_presence_of_element_located, mock_sleep):
    """for_element_to_be_present must raise TimeoutException for an absent locator."""
    wait = Waiter("driver", "finder", 0, MagicMock(name="logger"))
    # Any value other than "element_present" makes the fake condition falsy.
    page_element = PageElement(By.ID, "element_not_present")
    assert_that(calling(wait.for_element_to_be_present).with_args(page_element), raises(TimeoutException),
                "Waiting for an element to be present when the element is not present should raise an exception")
    check_mocked_functions_called(mock_sleep, mock_presence_of_element_located)
@mock.patch("uitestcore.waiter.ElementHasAttribute", side_effect=MockElementHasAttribute)
def test_for_element_to_have_attribute_exists(mock_element_has_attribute):
    """for_element_to_have_attribute must not raise when the fake reports the attribute exists."""
    test_waiter = Waiter("driver", "finder", 0, MagicMock(name="logger"))
    page_element = PageElement(By.ID, "id_exists")
    # "test_value_exists" is the sentinel MockElementHasAttribute treats as truthy.
    assert_that(calling(test_waiter.for_element_to_have_attribute).with_args(
        page_element, "test_attribute", "test_value_exists"), is_not(raises(TimeoutException)),
        "Checking for an element with an existing attribute should not raise an exception")
    check_mocked_functions_called(mock_element_has_attribute)
@mock.patch("time.sleep")
@mock.patch("uitestcore.waiter.ElementHasAttribute", side_effect=MockElementHasAttribute)
def test_for_element_to_have_attribute_not_exists(mock_element_has_attribute, mock_sleep):
    """for_element_to_have_attribute must raise TimeoutException for a missing attribute value."""
    test_waiter = Waiter("driver", "finder", 0, MagicMock(name="logger"))
    page_element = PageElement(By.ID, "id_exists")
    # "test_not_exists" is not the sentinel value, so the fake condition stays falsy.
    assert_that(calling(test_waiter.for_element_to_have_attribute).with_args(
        page_element, "test_attribute", "test_not_exists"), raises(TimeoutException),
        "Checking for an element with an non-existent attribute should raise an exception")
    check_mocked_functions_called(mock_sleep, mock_element_has_attribute)
@mock.patch("uitestcore.waiter.alert_is_present", side_effect=MockAlertIsPresent)
def test_for_alert_to_be_present_is_present(mock_alert_is_present):
    """for_alert_to_be_present must not raise when the fake reports an alert present."""
    wait = Waiter("driver", "finder", 0, MagicMock(name="logger"))
    assert_that(calling(wait.for_alert_to_be_present), is_not(raises(TimeoutException)),
                "Waiting for an alert to be present when the alert is present should not raise an exception")
    check_mocked_functions_called(mock_alert_is_present)
@mock.patch("time.sleep")
@mock.patch("uitestcore.waiter.alert_is_present", side_effect=MockAlertIsNotPresent)
def test_for_alert_to_be_present_is_not_present(mock_alert_is_present, mock_sleep):
    """for_alert_to_be_present must raise TimeoutException when no alert ever appears."""
    wait = Waiter("driver", "finder", 0, MagicMock(name="logger"))
    assert_that(calling(wait.for_alert_to_be_present), raises(TimeoutException),
                "Waiting for an alert to be present when the alert is not present should raise an exception")
    check_mocked_functions_called(mock_sleep, mock_alert_is_present)
| 42.352273
| 115
| 0.774349
| 980
| 7,454
| 5.522449
| 0.102041
| 0.013304
| 0.035477
| 0.035107
| 0.823171
| 0.772727
| 0.742609
| 0.723947
| 0.689209
| 0.671101
| 0
| 0.001869
| 0.138583
| 7,454
| 175
| 116
| 42.594286
| 0.840991
| 0
| 0
| 0.415254
| 0
| 0
| 0.234907
| 0.057687
| 0
| 0
| 0
| 0
| 0.084746
| 1
| 0.169492
| false
| 0
| 0.067797
| 0.033898
| 0.381356
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
be5fe2e3d8b1ff91a22b7870c9b47fb15fecd6a5
| 120
|
py
|
Python
|
src/m3_ext/ui/misc/__init__.py
|
barsgroup/m3-ext
|
15cd6772a5e664431e363ee9755c5fbf9a535c21
|
[
"MIT"
] | 1
|
2020-06-03T18:26:16.000Z
|
2020-06-03T18:26:16.000Z
|
src/m3_ext/ui/misc/__init__.py
|
barsgroup/m3-ext
|
15cd6772a5e664431e363ee9755c5fbf9a535c21
|
[
"MIT"
] | null | null | null |
src/m3_ext/ui/misc/__init__.py
|
barsgroup/m3-ext
|
15cd6772a5e664431e363ee9755c5fbf9a535c21
|
[
"MIT"
] | 1
|
2018-04-21T12:13:58.000Z
|
2018-04-21T12:13:58.000Z
|
#coding:utf-8
"""
Additional functionality
"""
from store import ExtDataStore, ExtJsonStore
from label import ExtLabel
| 17.142857
| 44
| 0.8
| 14
| 120
| 6.857143
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009434
| 0.116667
| 120
| 6
| 45
| 20
| 0.896226
| 0.316667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
be63b439fb2afc89cd5511b141a2704add7678cc
| 146
|
py
|
Python
|
src/simmate/calculators/vasp/outputs/__init__.py
|
jacksund/simmate
|
0b29704540574e11b711f7b44e2cb7740141ebb4
|
[
"BSD-3-Clause"
] | 9
|
2021-12-21T02:58:21.000Z
|
2022-01-25T14:00:06.000Z
|
src/simmate/calculators/vasp/outputs/__init__.py
|
jacksund/simmate
|
0b29704540574e11b711f7b44e2cb7740141ebb4
|
[
"BSD-3-Clause"
] | 51
|
2022-01-01T15:59:58.000Z
|
2022-03-26T21:25:42.000Z
|
src/simmate/calculators/vasp/outputs/__init__.py
|
jacksund/simmate
|
0b29704540574e11b711f7b44e2cb7740141ebb4
|
[
"BSD-3-Clause"
] | 7
|
2022-01-01T03:44:32.000Z
|
2022-03-29T19:59:27.000Z
|
# -*- coding: utf-8 -*-
from simmate.utilities import get_doc_from_readme
# Derive this module's docstring from the package README at import time.
__doc__ = get_doc_from_readme(__file__)
from .oszicar import Oszicar
| 18.25
| 49
| 0.773973
| 21
| 146
| 4.714286
| 0.571429
| 0.121212
| 0.20202
| 0.323232
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007874
| 0.130137
| 146
| 7
| 50
| 20.857143
| 0.771654
| 0.143836
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
beae1cb835061866caa10b7fe1867388d413a83a
| 65
|
py
|
Python
|
tests/java/org/python/indexer/data/pkg/plant/garden/catnip.py
|
jeff5/jython-whinchat
|
65d8e5268189f8197295ff2d91be3decb1ee0081
|
[
"CNRI-Jython"
] | 577
|
2020-06-04T16:34:44.000Z
|
2022-03-31T11:46:07.000Z
|
tests/java/org/python/indexer/data/pkg/plant/garden/catnip.py
|
jeff5/jython-whinchat
|
65d8e5268189f8197295ff2d91be3decb1ee0081
|
[
"CNRI-Jython"
] | 174
|
2015-01-08T20:37:09.000Z
|
2020-06-03T16:48:59.000Z
|
tests/java/org/python/indexer/data/pkg/plant/garden/catnip.py
|
jeff5/jython-whinchat
|
65d8e5268189f8197295ff2d91be3decb1ee0081
|
[
"CNRI-Jython"
] | 162
|
2015-02-07T02:14:38.000Z
|
2020-05-30T16:42:03.000Z
|
# reserved for circular import test
import pkg.animal.mammal.cat
| 21.666667
| 35
| 0.815385
| 10
| 65
| 5.3
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123077
| 65
| 2
| 36
| 32.5
| 0.929825
| 0.507692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fe295956b414aa118bec80cbd8129eec7b6a692d
| 49,570
|
py
|
Python
|
pkgs/ops-pkg/src/genie/libs/ops/interface/iosxe/asr1k/tests/interface_output.py
|
kecorbin/genielibs
|
5d3951b8911013691822e73e9c3d0f557ca10f43
|
[
"Apache-2.0"
] | null | null | null |
pkgs/ops-pkg/src/genie/libs/ops/interface/iosxe/asr1k/tests/interface_output.py
|
kecorbin/genielibs
|
5d3951b8911013691822e73e9c3d0f557ca10f43
|
[
"Apache-2.0"
] | null | null | null |
pkgs/ops-pkg/src/genie/libs/ops/interface/iosxe/asr1k/tests/interface_output.py
|
kecorbin/genielibs
|
5d3951b8911013691822e73e9c3d0f557ca10f43
|
[
"Apache-2.0"
] | null | null | null |
'''
Interface Genie Ops Object Outputs for IOSXE.
'''
class InterfaceOutput(object):
ShowInterfaces = {
"Port-channel12": {
"flow_control": {
"send": False,
"receive": False
},
"type": "EtherChannel",
"counters": {
"out_buffer_failure": 0,
"out_underruns": 0,
"in_giants": 0,
"in_throttles": 0,
"in_frame": 0,
"in_ignored": 0,
"last_clear": "1d23h",
"out_interface_resets": 2,
"in_mac_pause_frames": 0,
"out_collision": 0,
"rate": {
"out_rate_pkts": 0,
"load_interval": 300,
"out_rate": 0,
"in_rate": 2000,
"in_rate_pkts": 2
},
"in_watchdog": 0,
"out_deferred": 0,
"out_mac_pause_frames": 0,
"in_pkts": 961622,
"in_multicast_pkts": 4286699522,
"in_runts": 0,
"out_unknown_protocl_drops": 0,
"in_no_buffer": 0,
"out_buffers_swapped": 0,
"out_lost_carrier": 0,
"out_errors": 0,
"in_errors": 0,
"in_octets": 72614643,
"in_crc_errors": 0,
"out_no_carrier": 0,
"in_with_dribble": 0,
"in_broadcast_pkts": 944788,
"out_pkts": 39281,
"out_late_collision": 0,
"out_octets": 6235318,
"in_overrun": 0,
"out_babble": 0
},
"auto_negotiate": True,
"phys_address": "0057.d228.1a02",
"keepalive": 10,
"output_hang": "never",
"txload": "1/255",
"oper_status": "up",
"arp_type": "arpa",
"rxload": "1/255",
"duplex_mode": "full",
"link_type": "auto",
"queues": {
"input_queue_size": 0,
"total_output_drop": 0,
"input_queue_drops": 0,
"input_queue_max": 2000,
"output_queue_size": 0,
"input_queue_flushes": 0,
"output_queue_max": 0,
"queue_strategy": "fifo"
},
"encapsulations": {
"encapsulation": "qinq virtual lan",
"first_dot1q": "10",
"second_dot1q": "20",
},
"last_input": "never",
"last_output": "1d22h",
"line_protocol": "up",
"mac_address": "0057.d228.1a02",
"connected": True,
"port_channel": {
"port_channel_member": True,
"port_channel_member_intfs": ['GigabitEthernet1/0/2'],
},
"arp_timeout": "04:00:00",
"bandwidth": 1000000,
"port_speed": "1000",
"enabled": True,
"mtu": 1500,
"delay": 10,
"reliability": "255/255"
},
"GigabitEthernet1/0/1": {
"flow_control": {
"send": False,
"receive": False
},
"type": "Gigabit Ethernet",
"counters": {
"out_buffer_failure": 0,
"out_underruns": 0,
"in_giants": 0,
"in_throttles": 0,
"in_frame": 0,
"in_ignored": 0,
"last_clear": "1d02h",
"out_interface_resets": 2,
"in_mac_pause_frames": 0,
"out_collision": 0,
"rate": {
"out_rate_pkts": 0,
"load_interval": 30,
"out_rate": 0,
"in_rate": 0,
"in_rate_pkts": 0
},
"in_watchdog": 0,
"out_deferred": 0,
"out_mac_pause_frames": 0,
"in_pkts": 12127,
"in_multicast_pkts": 4171,
"in_runts": 0,
"out_unknown_protocl_drops": 0,
"in_no_buffer": 0,
"out_buffers_swapped": 0,
"out_lost_carrier": 0,
"out_errors": 0,
"in_errors": 0,
"in_octets": 2297417,
"in_crc_errors": 0,
"out_no_carrier": 0,
"in_with_dribble": 0,
"in_broadcast_pkts": 0,
"out_pkts": 12229,
"out_late_collision": 0,
"out_octets": 2321107,
"in_overrun": 0,
"out_babble": 0
},
"phys_address": "0057.d228.1a64",
"keepalive": 10,
"output_hang": "never",
"txload": "1/255",
"description": "desc",
"oper_status": "down",
"arp_type": "arpa",
"rxload": "1/255",
"duplex_mode": "auto",
"queues": {
"input_queue_size": 0,
"total_output_drop": 0,
"input_queue_drops": 0,
"input_queue_max": 375,
"output_queue_size": 0,
"input_queue_flushes": 0,
"output_queue_max": 40,
"queue_strategy": "fifo"
},
"ipv4": {
"10.1.1.1/24": {
"prefix_length": "24",
"ip": "10.1.1.1"
}
},
"encapsulations": {
"encapsulation": "arpa"
},
"last_input": "never",
"last_output": "04:39:18",
"line_protocol": "down",
"mac_address": "0057.d228.1a64",
"connected": False,
"port_channel": {
"port_channel_member": False
},
"media_type": "10/100/1000BaseTX",
"bandwidth": 768,
"port_speed": "1000",
"enabled": False,
"arp_timeout": "04:00:00",
"mtu": 1500,
"delay": 3330,
"reliability": "255/255"
},
"GigabitEthernet3": {
"flow_control": {
"send": False,
"receive": False
},
"type": "CSR vNIC",
'auto_negotiate': True,
'duplex_mode': 'full',
'link_type': 'auto',
'media_type': 'RJ45',
'port_speed': '1000',
"counters": {
"out_buffer_failure": 0,
"out_underruns": 0,
"in_giants": 0,
"in_throttles": 0,
"in_frame": 0,
"in_ignored": 0,
"last_clear": "never",
"out_interface_resets": 1,
"in_mac_pause_frames": 0,
"out_collision": 0,
"in_crc_errors": 0,
"rate": {
"out_rate_pkts": 0,
"load_interval": 300,
"out_rate": 0,
"in_rate": 0,
"in_rate_pkts": 0
},
"in_watchdog": 0,
"out_deferred": 0,
"out_mac_pause_frames": 0,
"in_pkts": 6,
"in_multicast_pkts": 0,
"in_runts": 0,
"in_no_buffer": 0,
"out_buffers_swapped": 0,
"out_errors": 0,
"in_errors": 0,
"in_octets": 480,
"out_unknown_protocl_drops": 0,
"out_no_carrier": 0,
"out_lost_carrier": 0,
"in_broadcast_pkts": 0,
"out_pkts": 28,
"out_late_collision": 0,
"out_octets": 7820,
"in_overrun": 0,
"out_babble": 0
},
"phys_address": "5254.0072.9b0c",
"keepalive": 10,
"output_hang": "never",
"txload": "1/255",
"reliability": "255/255",
"arp_type": "arpa",
"rxload": "1/255",
"queues": {
"input_queue_size": 0,
"total_output_drop": 0,
"input_queue_drops": 0,
"input_queue_max": 375,
"output_queue_size": 0,
"input_queue_flushes": 0,
"output_queue_max": 40,
"queue_strategy": "fifo"
},
"ipv4": {
"200.2.1.1/24": {
"prefix_length": "24",
"ip": "200.2.1.1"
},
"unnumbered": {
"interface_ref": "Loopback0"
}
},
"encapsulations": {
"encapsulation": "arpa"
},
"last_output": "00:00:27",
"line_protocol": "up",
"mac_address": "5254.0072.9b0c",
"oper_status": "up",
"port_channel": {
"port_channel_member": False
},
"arp_timeout": "04:00:00",
"bandwidth": 1000000,
"enabled": True,
"mtu": 1500,
"delay": 10,
"last_input": "never"
},
"Loopback0": {
"queues": {
"input_queue_size": 0,
"total_output_drop": 0,
"input_queue_drops": 0,
"input_queue_max": 75,
"output_queue_size": 0,
"input_queue_flushes": 0,
"output_queue_max": 0,
"queue_strategy": "fifo"
},
"mtu": 1514,
"encapsulations": {
"encapsulation": "loopback"
},
"last_output": "never",
"type": "Loopback",
"line_protocol": "up",
"oper_status": "up",
"keepalive": 10,
"output_hang": "never",
"txload": "1/255",
"counters": {
"out_buffer_failure": 0,
"out_underruns": 0,
"in_giants": 0,
"in_throttles": 0,
"in_frame": 0,
"in_ignored": 0,
"last_clear": "1d04h",
"out_interface_resets": 0,
"out_collision": 0,
"rate": {
"out_rate_pkts": 0,
"load_interval": 300,
"out_rate": 0,
"in_rate": 0,
"in_rate_pkts": 0
},
"in_pkts": 0,
"in_multicast_pkts": 0,
"in_runts": 0,
"in_no_buffer": 0,
"out_buffers_swapped": 0,
"out_errors": 0,
"in_errors": 0,
"in_octets": 0,
"in_crc_errors": 0,
"out_unknown_protocl_drops": 0,
"in_broadcast_pkts": 0,
"out_pkts": 72,
"out_octets": 5760,
"in_overrun": 0,
"in_abort": 0
},
"reliability": "255/255",
"bandwidth": 8000000,
"port_channel": {
"port_channel_member": False
},
"enabled": True,
"ipv4": {
"200.2.1.1/24": {
"prefix_length": "24",
"ip": "200.2.1.1"
}
},
"rxload": "1/255",
"delay": 5000,
"last_input": "1d02h"
},
"Vlan100": {
"type": "Ethernet SVI",
"counters": {
"out_buffer_failure": 0,
"out_underruns": 0,
"in_giants": 0,
"in_throttles": 0,
"in_frame": 0,
"in_ignored": 0,
"last_clear": "1d04h",
"out_interface_resets": 0,
"rate": {
"out_rate_pkts": 0,
"load_interval": 300,
"out_rate": 0,
"in_rate": 0,
"in_rate_pkts": 0
},
"in_pkts": 50790,
"in_multicast_pkts": 0,
"in_runts": 0,
"in_no_buffer": 0,
"out_buffers_swapped": 0,
"out_errors": 0,
"in_errors": 0,
"in_octets": 3657594,
"in_crc_errors": 0,
"out_unknown_protocl_drops": 0,
"in_broadcast_pkts": 0,
"out_pkts": 72,
"out_octets": 5526,
"in_overrun": 0
},
"phys_address": "0057.d228.1a51",
"queues": {
"input_queue_size": 0,
"total_output_drop": 0,
"input_queue_drops": 0,
"input_queue_max": 375,
"output_queue_size": 0,
"input_queue_flushes": 0,
"output_queue_max": 40,
"queue_strategy": "fifo"
},
"txload": "1/255",
"reliability": "255/255",
"arp_type": "arpa",
"rxload": "1/255",
"output_hang": "never",
"ipv4": {
"201.0.12.1/24": {
"prefix_length": "24",
"ip": "201.0.12.1"
}
},
"encapsulations": {
"encapsulation": "arpa"
},
"last_output": "1d03h",
"line_protocol": "up",
"mac_address": "0057.d228.1a51",
"oper_status": "up",
"port_channel": {
"port_channel_member": False
},
"arp_timeout": "04:00:00",
"bandwidth": 1000000,
"enabled": True,
"mtu": 1500,
"delay": 10,
"last_input": "never"
},
"GigabitEthernet1/0/2": {
"flow_control": {
"send": False,
"receive": False
},
"type": "Gigabit Ethernet",
"counters": {
"out_buffer_failure": 0,
"out_underruns": 0,
"in_giants": 0,
"in_throttles": 0,
"in_frame": 0,
"in_ignored": 0,
"last_clear": "1d02h",
"out_interface_resets": 5,
"in_mac_pause_frames": 0,
"out_collision": 0,
"rate": {
"out_rate_pkts": 0,
"load_interval": 300,
"out_rate": 0,
"in_rate": 3000,
"in_rate_pkts": 5
},
"in_watchdog": 0,
"out_deferred": 0,
"out_mac_pause_frames": 0,
"in_pkts": 545526,
"in_multicast_pkts": 535961,
"in_runts": 0,
"out_unknown_protocl_drops": 0,
"in_no_buffer": 0,
"out_buffers_swapped": 0,
"out_lost_carrier": 0,
"out_errors": 0,
"in_errors": 0,
"in_octets": 41210298,
"in_crc_errors": 0,
"out_no_carrier": 0,
"in_with_dribble": 0,
"in_broadcast_pkts": 535961,
"out_pkts": 23376,
"out_late_collision": 0,
"out_octets": 3642296,
"in_overrun": 0,
"out_babble": 0
},
"phys_address": "0057.d228.1a02",
"keepalive": 10,
"output_hang": "never",
"txload": "1/255",
"oper_status": "up",
"arp_type": "arpa",
"media_type": "10/100/1000BaseTX",
"rxload": "1/255",
"duplex_mode": "full",
"queues": {
"input_queue_size": 0,
"total_output_drop": 0,
"input_queue_drops": 0,
"input_queue_max": 2000,
"output_queue_size": 0,
"input_queue_flushes": 0,
"output_queue_max": 40,
"queue_strategy": "fifo"
},
"encapsulations": {
"encapsulation": "arpa"
},
"last_input": "never",
"last_output": "00:00:02",
"line_protocol": "up",
"mac_address": "0057.d228.1a02",
"connected": True,
"port_channel": {
"port_channel_member": False
},
"arp_timeout": "04:00:00",
"bandwidth": 1000000,
"port_speed": "1000",
"enabled": True,
"mtu": 1500,
"delay": 10,
"reliability": "255/255"
}
}
ShowEtherchannelSummary = {
'number_of_lag_in_use': 2,
'number_of_aggregators': 2,
'interfaces': {
'Port-channel2': {
'name': 'Port-channel2',
'bundle_id': 2,
'protocol': 'lacp',
'flags': 'RU',
'oper_status': 'up',
'members': {
'GigabitEthernet0/0/0': {
'interface': 'GigabitEthernet0/0/0',
'flags': 'bndl',
'bundled': True,
'port_channel': {
"port_channel_member": True,
"port_channel_int": "Port-channel2"
},
},
'GigabitEthernet0/0/1': {
'interface': 'GigabitEthernet0/0/1',
'flags': 'hot-sby',
'bundled': False,
'port_channel': {
"port_channel_member": True,
"port_channel_int": "Port-channel2"
},
},
},
'port_channel': {
'port_channel_member': True,
'port_channel_member_intfs': ['GigabitEthernet0/0/0', 'GigabitEthernet0/0/1'],
}
},
},
}
ShowIpInterface = {
"Vlan100": {
"sevurity_level": "default",
"ip_route_cache_flags": [
"CEF",
"Fast"
],
"enabled": True,
"oper_status": "up",
"address_determined_by": "configuration file",
"router_discovery": False,
"ip_multicast_fast_switching": False,
"split_horizon": True,
"bgp_policy_mapping": False,
"ip_output_packet_accounting": False,
"mtu": 1500,
"policy_routing": False,
"local_proxy_arp": False,
"proxy_arp": True,
"network_address_translation": False,
"ip_cef_switching_turbo_vector": True,
"icmp": {
"redirects": "always sent",
"mask_replies": "never sent",
"unreachables": "always sent",
},
"ipv4": {
"201.11.14.1/24": {
"prefix_length": "24",
"ip": "201.11.14.1",
"secondary": False,
"broadcase_address": "255.255.255.255"
}
},
"ip_access_violation_accounting": False,
"ip_cef_switching": True,
"unicast_routing_topologies": {
"topology": {
"base": {
"status": "up"
}
},
},
"ip_null_turbo_vector": True,
"probe_proxy_name_replies": False,
"ip_fast_switching": True,
"ip_multicast_distributed_fast_switching": False,
"tcp_ip_header_compression": False,
"rtp_ip_header_compression": False,
"input_features": ["MCI Check"],
"directed_broadcast_forwarding": False,
"ip_flow_switching": False
},
"GigabitEthernet0/0": {
"sevurity_level": "default",
'address_determined_by': 'setup command',
"ip_route_cache_flags": [
"CEF",
"Fast"
],
"enabled": True,
"oper_status": "up",
"router_discovery": False,
"ip_multicast_fast_switching": False,
"split_horizon": True,
"bgp_policy_mapping": False,
"ip_output_packet_accounting": False,
"mtu": 1500,
"policy_routing": False,
"local_proxy_arp": False,
"vrf": "Mgmt-vrf",
"proxy_arp": True,
"network_address_translation": False,
"ip_cef_switching_turbo_vector": True,
"icmp": {
"redirects": "always sent",
"mask_replies": "never sent",
"unreachables": "always sent",
},
"ipv4": {
"10.1.8.134/24": {
"prefix_length": "24",
"ip": "10.1.8.134",
"secondary": False,
"broadcase_address": "255.255.255.255"
}
},
"ip_access_violation_accounting": False,
"ip_cef_switching": True,
"unicast_routing_topologies": {
"topology": {
"base": {
"status": "up"
}
},
},
"ip_null_turbo_vector": True,
"probe_proxy_name_replies": False,
"ip_fast_switching": True,
"ip_multicast_distributed_fast_switching": False,
"tcp_ip_header_compression": False,
"rtp_ip_header_compression": False,
"input_features": ["MCI Check"],
"directed_broadcast_forwarding": False,
"ip_flow_switching": False
},
"GigabitEthernet2": {
"enabled": False,
"oper_status": "down"
},
"GigabitEthernet1/0/1": {
"sevurity_level": "default",
'address_determined_by': 'setup command',
"ip_route_cache_flags": [
"CEF",
"Fast"
],
"enabled": False,
"oper_status": "down",
"router_discovery": False,
"ip_multicast_fast_switching": False,
"split_horizon": True,
"bgp_policy_mapping": False,
"ip_output_packet_accounting": False,
"mtu": 1500,
"policy_routing": False,
"local_proxy_arp": False,
"proxy_arp": True,
"network_address_translation": False,
"ip_cef_switching_turbo_vector": True,
"icmp": {
"redirects": "always sent",
"mask_replies": "never sent",
"unreachables": "always sent",
},
"ipv4": {
"10.1.1.1/24": {
"prefix_length": "24",
"ip": "10.1.1.1",
"secondary": False,
"broadcase_address": "255.255.255.255"
},
"10.2.2.2/24": {
"prefix_length": "24",
"ip": "10.2.2.2",
"secondary": True
},
},
"ip_access_violation_accounting": False,
"ip_cef_switching": True,
"unicast_routing_topologies": {
"topology": {
"base": {
"status": "up"
}
},
},
'wccp': {
'redirect_outbound': False,
'redirect_inbound': False,
'redirect_exclude': False,
},
"ip_null_turbo_vector": True,
"probe_proxy_name_replies": False,
"ip_fast_switching": True,
"ip_multicast_distributed_fast_switching": False,
"tcp_ip_header_compression": False,
"rtp_ip_header_compression": False,
"directed_broadcast_forwarding": False,
"ip_flow_switching": False,
"input_features": ["MCI Check", "QoS Classification", "QoS Marking"],
}
}
# Test fixture: expected parsed structure (presumably of an IOS-style
# "show ipv6 interface") — per-interface IPv6 addresses, ND parameters,
# ICMPv6 settings and joined multicast groups.
# Fix: the GigabitEthernet3 "ipv6" literal declared the "nd" key twice;
# in a dict literal the second value silently replaced the first, so the
# two entries are merged here (the surviving value is unchanged).
ShowIpv6Interface = {
    "GigabitEthernet1/0/1": {
        "joined_group_addresses": [
            "FF02::1"
        ],
        "ipv6": {
            "2001:DB8:2:2::2/64": {
                "ip": "2001:DB8:2:2::2",
                "prefix_length": "64",
                "status": "tentative"
            },
            "2000::1/126": {
                "ip": "2000::1",
                "prefix_length": "126",
                "status": "tentative"
            },
            "2001:DB8:1:1::1/64": {
                "ip": "2001:DB8:1:1::1",
                "prefix_length": "64",
                "status": "tentative"
            },
            "2001:DB8:4:4:257:D2FF:FE28:1A64/64": {
                "ip": "2001:DB8:4:4:257:D2FF:FE28:1A64",
                "prefix_length": "64",
                "status": "tentative",
                "eui_64": True
            },
            "2001:DB8:3:3::3/64": {
                "ip": "2001:DB8:3:3::3",
                "prefix_length": "64",
                "status": "tentative",
                "anycast": True
            },
            "FE80::257:D2FF:FE28:1A64": {
                "ip": "FE80::257:D2FF:FE28:1A64",
                "status": "tentative",
                "origin": "link_layer",
            },
            "enabled": True,
            "nd": {
                "dad_attempts": 1,
                "ns_retransmit_interval": 1000,
                "dad_enabled": True,
                "reachable_time": 30000,
                "using_time": 30000
            },
            "icmp": {
                "error_messages_limited": 100,
                "redirects": True,
                "unreachables": "sent"
            },
        },
        "oper_status": "down",
        "enabled": False,
        "mtu": 1500
    },
    "Vlan211": {
        "joined_group_addresses": [
            "FF02::1",
            "FF02::1:FF14:1",
            "FF02::1:FF28:1A71"
        ],
        "ipv6": {
            "2001:10::14:1/112": {
                "ip": "2001:10::14:1",
                "prefix_length": "112",
                "status": "valid",
                'autoconf': {
                    'preferred_lifetime': 604711,
                    'valid_lifetime': 2591911,
                },
            },
            "FE80::257:D2FF:FE28:1A71": {
                "ip": "FE80::257:D2FF:FE28:1A71",
                "status": "valid",
                "origin": "link_layer",
            },
            "enabled": True,
            "nd": {
                "dad_attempts": 1,
                "ns_retransmit_interval": 1000,
                "dad_enabled": True,
                "reachable_time": 30000,
                "using_time": 30000
            },
            "icmp": {
                "error_messages_limited": 100,
                "redirects": True,
                "unreachables": "sent"
            },
        },
        "oper_status": "up",
        "enabled": True,
        "autoconf": True,
        "mtu": 1500
    },
    "GigabitEthernet3": {
        "enabled": True,
        "joined_group_addresses": [
            "FF02::1",
            "FF02::1:FF1E:4F2",
            "FF02::2"
        ],
        "ipv6": {
            "enabled": False,
            "FE80::5054:FF:FE1E:4F2": {
                "ip": "FE80::5054:FF:FE1E:4F2",
                "status": "valid",
                "origin": "link_layer",
            },
            "unnumbered": {
                "interface_ref": "Loopback0",
            },
            # Merged: previously two "nd" keys; the second (this one) was
            # a superset of the first and won at runtime anyway.
            "nd": {
                "dad_attempts": 1,
                "dad_enabled": True,
                "reachable_time": 30000,
                "using_time": 30000,
                "advertised_reachable_time": 0,
                "advertised_retransmit_interval": 0,
                "router_advertisements_interval": 200,
                "router_advertisements_live": 1800,
                "advertised_default_router_preference": 'Medium',
                "advertised_reachable_time_unspecified": False,
                "advertised_retransmit_interval_unspecified": False,
            },
            "icmp": {
                "unreachables": "sent",
                "redirects": True,
                "error_messages_limited": 100
            },
        },
        "oper_status": "up",
        "mtu": 1500,
        "addresses_config_method": 'stateless autoconfig',
    }
}
# Test fixture: expected parsed structure (presumably of "show vrf
# detail" — the keys mirror that CLI's fields): per-VRF id, member
# interfaces, per-address-family table ids, import/export maps,
# routing-table limits and route targets.  Values are exact expected
# output; do not edit without updating the corresponding device output.
ShowVrfDetail = {
    "Mgmt-vrf": {
        "vrf_id": 1,
        "interfaces": [
            "GigabitEthernet0/0"
        ],
        "address_family": {
            "ipv4 unicast": {
                "table_id": "0x1"
            },
            "ipv6 unicast": {
                "table_id": "0x1E000001"
            }
        },
        "flags": "0x0"
    },
    "VRF1": {
        "interfaces": [
            "GigabitEthernet1/0/2"
        ],
        "address_family": {
            "ipv4 unicast": {
                "export_to_global": {
                    "export_to_global_map": "export_to_global_map",
                    "prefix_limit": 1000
                },
                "import_from_global": {
                    "prefix_limit": 1000,
                    "import_from_global_map": "import_from_global_map"
                },
                "table_id": "0x1",
                "routing_table_limit": {
                    "routing_table_limit_action": {
                        "enable_alert_percent": {
                            "alert_percent_value": 10000
                        }
                    }
                },
                "route_target": {
                    "200:1": {
                        "rt_type": "both",
                        "route_target": "200:1"
                    },
                    "100:1": {
                        "rt_type": "both",
                        "route_target": "100:1"
                    }
                }
            },
            "ipv6 unicast": {
                "export_to_global": {
                    "export_to_global_map": "export_to_global_map",
                    "prefix_limit": 1000
                },
                "table_id": "0x1E000001",
                "routing_table_limit": {
                    "routing_table_limit_action": {
                        "enable_alert_percent": {
                            "alert_percent_value": 7000
                        }
                    },
                    "routing_table_limit_number": 10000
                },
                "route_target": {
                    "200:1": {
                        "rt_type": "import",
                        "route_target": "200:1"
                    },
                    "400:1": {
                        "rt_type": "import",
                        "route_target": "400:1"
                    },
                    "300:1": {
                        "rt_type": "export",
                        "route_target": "300:1"
                    },
                    "100:1": {
                        "rt_type": "export",
                        "route_target": "100:1"
                    }
                }
            }
        },
        "flags": "0x100",
        "route_distinguisher": "100:1",
        "vrf_id": 1
    }
}
# Test fixture: expected parsed per-interface accounting counters
# (per-protocol characters/packets in and out).  The same numbers are
# repeated inside InterfaceOpsOutput_info below and must stay in sync.
ShowInterfacesAccounting = {
    "GigabitEthernet1/0/1": {
        "accounting": {
            "arp": {
                "chars_in": 4590030,
                "chars_out": 120,
                "pkts_in": 109280,
                "pkts_out": 2
            },
            "ip": {
                "chars_in": 2173570,
                "chars_out": 2167858,
                "pkts_in": 22150,
                "pkts_out": 22121
            },
            "ipv6": {
                "chars_in": 1944,
                "chars_out": 0,
                "pkts_in": 24,
                "pkts_out": 0
            },
            "other": {
                "chars_in": 5306164,
                "chars_out": 120,
                "pkts_in": 112674,
                "pkts_out": 2
            }
        }
    },
    "GigabitEthernet1/0/2": {
        "accounting": {
            "arp": {
                "chars_in": 5460,
                "chars_out": 5520,
                "pkts_in": 91,
                "pkts_out": 92
            },
            "ip": {
                "chars_in": 968690,
                "chars_out": 1148402,
                "pkts_in": 11745,
                "pkts_out": 10821
            },
            "ipv6": {
                "chars_in": 70,
                "chars_out": 0,
                "pkts_in": 1,
                "pkts_out": 0
            },
            "other": {
                "chars_in": 741524,
                "chars_out": 5520,
                "pkts_in": 3483,
                "pkts_out": 92
            }
        }
    }
}
# Test fixture: the expected merged "interface ops" view built from the
# Show* fixtures above — per interface it combines counters, accounting,
# IPv4/IPv6 addresses, encapsulation, VRF membership and port-channel
# relationships.  All values mirror the Show* fixtures and must match
# them exactly; edit both together or not at all.
InterfaceOpsOutput_info = {
    "Port-channel12": {
        "switchport_enable": False,
        "type": "EtherChannel",
        "oper_status": "up",
        "mac_address": "0057.d228.1a02",
        "duplex_mode": "full",
        "port_speed": "1000",
        "delay": 10,
        "phys_address": "0057.d228.1a02",
        "port_channel": {
            "port_channel_member": True,
            "port_channel_member_intfs": [
                "GigabitEthernet1/0/2"
            ]
        },
        "auto_negotiate": True,
        "mtu": 1500,
        "flow_control": {
            "receive": False,
            "send": False
        },
        "enabled": True,
        "counters": {
            "last_clear": "1d23h",
            "in_pkts": 961622,
            "out_errors": 0,
            "in_octets": 72614643,
            "out_octets": 6235318,
            "in_broadcast_pkts": 944788,
            "rate": {
                "out_rate_pkts": 0,
                "out_rate": 0,
                "load_interval": 300,
                "in_rate": 2000,
                "in_rate_pkts": 2
            },
            "out_pkts": 39281,
            "in_multicast_pkts": 4286699522,
            "in_crc_errors": 0,
            "in_mac_pause_frames": 0,
            "in_errors": 0,
            "out_mac_pause_frames": 0
        },
        "bandwidth": 1000000,
        "encapsulation": {
            "second_dot1q": "20",
            "first_dot1q": "10",
            "encapsulation": "qinq virtual lan"
        }
    },
    "Loopback0": {
        "switchport_enable": False,
        "type": "Loopback",
        "oper_status": "up",
        "encapsulation": {
            "encapsulation": "loopback"
        },
        "mtu": 1514,
        "port_channel": {
            "port_channel_member": False
        },
        "enabled": True,
        "counters": {
            "out_errors": 0,
            "last_clear": "1d04h",
            "in_broadcast_pkts": 0,
            "rate": {
                "out_rate_pkts": 0,
                "out_rate": 0,
                "load_interval": 300,
                "in_rate": 0,
                "in_rate_pkts": 0
            },
            "in_pkts": 0,
            "out_pkts": 72,
            "in_errors": 0,
            "out_octets": 5760,
            "in_multicast_pkts": 0,
            "in_crc_errors": 0,
            "in_octets": 0
        },
        "bandwidth": 8000000,
        "delay": 5000
    },
    "Vlan211": {
        'switchport_enable': False,
        "ipv6": {
            "2001:10::14:1/112": {
                "ip": "2001:10::14:1",
                "autoconf": {
                    "valid_lifetime": 2591911,
                    "preferred_lifetime": 604711
                },
                "prefix_length": "112",
                "status": "valid"
            },
            "FE80::257:D2FF:FE28:1A71": {
                "origin": "link_layer",
                "ip": "FE80::257:D2FF:FE28:1A71",
                "status": "valid"
            }
        }
    },
    "GigabitEthernet1/0/1": {
        "accounting": {
            "arp": {
                "chars_in": 4590030,
                "chars_out": 120,
                "pkts_in": 109280,
                "pkts_out": 2
            },
            "ip": {
                "chars_in": 2173570,
                "chars_out": 2167858,
                "pkts_in": 22150,
                "pkts_out": 22121
            },
            "ipv6": {
                "chars_in": 1944,
                "chars_out": 0,
                "pkts_in": 24,
                "pkts_out": 0
            },
            "other": {
                "chars_in": 5306164,
                "chars_out": 120,
                "pkts_in": 112674,
                "pkts_out": 2
            }
        },
        "switchport_enable": False,
        "type": "Gigabit Ethernet",
        "oper_status": "down",
        "ipv4": {
            "10.2.2.2/24": {
                "secondary": True,
                "ip": "10.2.2.2",
                "prefix_length": "24"
            },
            "10.1.1.1/24": {
                "secondary": False,
                "ip": "10.1.1.1",
                "prefix_length": "24"
            }
        },
        "mac_address": "0057.d228.1a64",
        "duplex_mode": "auto",
        "port_speed": "1000",
        "delay": 3330,
        "phys_address": "0057.d228.1a64",
        "port_channel": {
            "port_channel_member": False
        },
        "encapsulation": {
            "encapsulation": "arpa"
        },
        "mtu": 1500,
        "description": "desc",
        "flow_control": {
            "receive": False,
            "send": False
        },
        "enabled": False,
        "counters": {
            "last_clear": "1d02h",
            "in_pkts": 12127,
            "out_errors": 0,
            "in_octets": 2297417,
            "out_octets": 2321107,
            "in_broadcast_pkts": 0,
            "rate": {
                "out_rate_pkts": 0,
                "out_rate": 0,
                "load_interval": 30,
                "in_rate": 0,
                "in_rate_pkts": 0
            },
            "out_pkts": 12229,
            "in_multicast_pkts": 4171,
            "in_crc_errors": 0,
            "in_mac_pause_frames": 0,
            "in_errors": 0,
            "out_mac_pause_frames": 0
        },
        "bandwidth": 768,
        "ipv6": {
            "FE80::257:D2FF:FE28:1A64": {
                "origin": "link_layer",
                "ip": "FE80::257:D2FF:FE28:1A64",
                "status": "tentative"
            },
            "2001:DB8:4:4:257:D2FF:FE28:1A64/64": {
                "ip": "2001:DB8:4:4:257:D2FF:FE28:1A64",
                "eui_64": True,
                "prefix_length": "64",
                "status": "tentative"
            },
            "2000::1/126": {
                "ip": "2000::1",
                "prefix_length": "126",
                "status": "tentative"
            },
            "2001:DB8:1:1::1/64": {
                "ip": "2001:DB8:1:1::1",
                "prefix_length": "64",
                "status": "tentative"
            },
            "2001:DB8:2:2::2/64": {
                "ip": "2001:DB8:2:2::2",
                "prefix_length": "64",
                "status": "tentative"
            },
            "2001:DB8:3:3::3/64": {
                "ip": "2001:DB8:3:3::3",
                "anycast": True,
                "prefix_length": "64",
                "status": "tentative"
            }
        }
    },
    "GigabitEthernet1/0/2": {
        "accounting": {
            "arp": {
                "chars_in": 5460,
                "chars_out": 5520,
                "pkts_in": 91,
                "pkts_out": 92
            },
            "ip": {
                "chars_in": 968690,
                "chars_out": 1148402,
                "pkts_in": 11745,
                "pkts_out": 10821
            },
            "ipv6": {
                "chars_in": 70,
                "chars_out": 0,
                "pkts_in": 1,
                "pkts_out": 0
            },
            "other": {
                "chars_in": 741524,
                "chars_out": 5520,
                "pkts_in": 3483,
                "pkts_out": 92
            }
        },
        "switchport_enable": False,
        "type": "Gigabit Ethernet",
        "oper_status": "up",
        "mac_address": "0057.d228.1a02",
        "duplex_mode": "full",
        "vrf": "VRF1",
        "delay": 10,
        "phys_address": "0057.d228.1a02",
        "port_channel": {
            "port_channel_member": False
        },
        "port_speed": "1000",
        "encapsulation": {
            "encapsulation": "arpa"
        },
        "mtu": 1500,
        "flow_control": {
            "receive": False,
            "send": False
        },
        "enabled": True,
        "counters": {
            "last_clear": "1d02h",
            "in_pkts": 545526,
            "out_errors": 0,
            "in_octets": 41210298,
            "out_octets": 3642296,
            "in_broadcast_pkts": 535961,
            "rate": {
                "out_rate_pkts": 0,
                "out_rate": 0,
                "load_interval": 300,
                "in_rate": 3000,
                "in_rate_pkts": 5
            },
            "out_pkts": 23376,
            "in_multicast_pkts": 535961,
            "in_crc_errors": 0,
            "in_mac_pause_frames": 0,
            "in_errors": 0,
            "out_mac_pause_frames": 0
        },
        "bandwidth": 1000000
    },
    "Vlan100": {
        "switchport_enable": False,
        "type": "Ethernet SVI",
        "oper_status": "up",
        "ipv4": {
            "201.11.14.1/24": {
                "secondary": False,
                "ip": "201.11.14.1",
                "prefix_length": "24"
            }
        },
        "mac_address": "0057.d228.1a51",
        "delay": 10,
        "phys_address": "0057.d228.1a51",
        "port_channel": {
            "port_channel_member": False
        },
        "encapsulation": {
            "encapsulation": "arpa"
        },
        "mtu": 1500,
        "enabled": True,
        "counters": {
            "out_errors": 0,
            "last_clear": "1d04h",
            "in_broadcast_pkts": 0,
            "rate": {
                "out_rate_pkts": 0,
                "out_rate": 0,
                "load_interval": 300,
                "in_rate": 0,
                "in_rate_pkts": 0
            },
            "in_pkts": 50790,
            "out_pkts": 72,
            "in_errors": 0,
            "out_octets": 5526,
            "in_multicast_pkts": 0,
            "in_crc_errors": 0,
            "in_octets": 3657594
        },
        "bandwidth": 1000000
    },
    "GigabitEthernet0/0": {
        "switchport_enable": False,
        "ipv4": {
            "10.1.8.134/24": {
                "secondary": False,
                "ip": "10.1.8.134",
                "prefix_length": "24"
            }
        },
        "vrf": "Mgmt-vrf"
    },
    "GigabitEthernet3": {
        "switchport_enable": False,
        "type": "CSR vNIC",
        "oper_status": "up",
        "ipv4": {
            "unnumbered": {
                "interface_ref": "Loopback0"
            }
        },
        "mac_address": "5254.0072.9b0c",
        "duplex_mode": "full",
        "port_speed": "1000",
        "delay": 10,
        "phys_address": "5254.0072.9b0c",
        "port_channel": {
            "port_channel_member": False
        },
        "ipv6": {
            "FE80::5054:FF:FE1E:4F2": {
                "origin": "link_layer",
                "ip": "FE80::5054:FF:FE1E:4F2",
                "status": "valid"
            }
        },
        "auto_negotiate": True,
        "mtu": 1500,
        "flow_control": {
            "receive": False,
            "send": False
        },
        "enabled": True,
        "counters": {
            "last_clear": "never",
            "in_pkts": 6,
            "out_errors": 0,
            "in_octets": 480,
            "out_octets": 7820,
            "in_broadcast_pkts": 0,
            "rate": {
                "out_rate_pkts": 0,
                "out_rate": 0,
                "load_interval": 300,
                "in_rate": 0,
                "in_rate_pkts": 0
            },
            "out_pkts": 28,
            "in_multicast_pkts": 0,
            "in_crc_errors": 0,
            "in_mac_pause_frames": 0,
            "in_errors": 0,
            "out_mac_pause_frames": 0
        },
        "bandwidth": 1000000,
        "encapsulation": {
            "encapsulation": "arpa"
        }
    },
    "Port-channel2": {
        "port_channel": {
            "port_channel_member": True,
            "port_channel_member_intfs": [
                "GigabitEthernet0/0/0",
                "GigabitEthernet0/0/1"
            ]
        },
    },
    "GigabitEthernet0/0/0": {
        "port_channel": {
            "port_channel_member": True,
            "port_channel_int": "Port-channel2"
        },
    },
    "GigabitEthernet0/0/1": {
        "port_channel": {
            "port_channel_member": True,
            "port_channel_int": "Port-channel2"
        },
    }
}
| 34.352044
| 98
| 0.359976
| 3,732
| 49,570
| 4.476688
| 0.110932
| 0.017777
| 0.011851
| 0.023703
| 0.808883
| 0.741366
| 0.69827
| 0.67361
| 0.621536
| 0.613096
| 0
| 0.10365
| 0.516926
| 49,570
| 1,442
| 99
| 34.375867
| 0.594045
| 0.000908
| 0
| 0.751748
| 0
| 0
| 0.327793
| 0.044733
| 0
| 0
| 0.000687
| 0
| 0
| 1
| 0
| false
| 0
| 0.002797
| 0
| 0.008392
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fe4160d77487045b711cc958f54259f1edc755c7
| 10,355
|
py
|
Python
|
tests/functional/test_cache.py
|
Tesorio/django-migration-linter
|
832c1e2dd9b116592a7e291e4288f24621d11528
|
[
"Apache-2.0"
] | null | null | null |
tests/functional/test_cache.py
|
Tesorio/django-migration-linter
|
832c1e2dd9b116592a7e291e4288f24621d11528
|
[
"Apache-2.0"
] | null | null | null |
tests/functional/test_cache.py
|
Tesorio/django-migration-linter
|
832c1e2dd9b116592a7e291e4288f24621d11528
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 3YOURMIND GmbH
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import unittest
from django.conf import settings
from django.db.migrations import Migration
from django_migration_linter import (
MigrationLinter,
analyse_sql_statements,
get_migration_abspath,
IgnoreMigration,
)
if sys.version_info >= (3, 3):
import unittest.mock as mock
else:
import mock
# Minimal Migration subclass whose only operation is IgnoreMigration(),
# used below to verify that migrations carrying the ignore marker never
# reach the SQL analyser.
class OperationsIgnoreMigration(Migration):
    operations = [IgnoreMigration()]
class CacheTestCase(unittest.TestCase):
    """Functional tests for the migration linter's on-disk result cache.

    Every test clears the old cache first, lints with the SQL analysis
    spied on via ``mock.patch(wraps=...)``, then inspects the freshly
    written ``linter.new_cache``.  Cache keys are md5 hashes of the
    migration files' contents.
    """

    def setUp(self):
        self.test_project_path = os.path.dirname(settings.BASE_DIR)

    def _assert_not_null_error(self, cache):
        """Assert the erroneous migration's cache entry carries exactly
        one NOT NULL error (factored out of the tests that previously
        duplicated this assertion block four times)."""
        self.assertListEqual(
            [
                {
                    "err_msg": "NOT NULL constraint on columns",
                    "code": "NOT_NULL",
                    "table": None,
                    "column": None,
                }
            ],
            cache["19fd3ea688fc05e2cc2a6e67c0b7aa17"]["errors"],
        )

    @mock.patch(
        "django_migration_linter.MigrationLinter._gather_all_migrations",
        return_value=[
            Migration("0001_create_table", "app_add_not_null_column"),
            Migration("0002_add_new_not_null_field", "app_add_not_null_column"),
        ],
    )
    def test_cache_normal(self, *args):
        """A second run over unchanged migrations is served from cache."""
        linter = MigrationLinter(self.test_project_path)
        linter.old_cache.clear()
        linter.old_cache.save()
        with mock.patch(
            "django_migration_linter.migration_linter.analyse_sql_statements",
            wraps=analyse_sql_statements,
        ) as analyse_sql_statements_mock:
            linter.lint_all_migrations()
            self.assertEqual(2, analyse_sql_statements_mock.call_count)
        cache = linter.new_cache
        cache.load()
        self.assertEqual("OK", cache["4a3770a405738d457e2d23e17fb1f3aa"]["result"])
        self.assertEqual("ERR", cache["19fd3ea688fc05e2cc2a6e67c0b7aa17"]["result"])
        self._assert_not_null_error(cache)
        # Start the Linter again -> should use cache now.
        linter = MigrationLinter(self.test_project_path)
        with mock.patch(
            "django_migration_linter.migration_linter.analyse_sql_statements",
            wraps=analyse_sql_statements,
        ) as analyse_sql_statements_mock:
            linter.lint_all_migrations()
            analyse_sql_statements_mock.assert_not_called()
        self.assertTrue(linter.has_errors)

    @mock.patch(
        "django_migration_linter.MigrationLinter._gather_all_migrations",
        return_value=[
            Migration("0001_create_table", "app_add_not_null_column"),
            Migration("0002_add_new_not_null_field", "app_add_not_null_column"),
        ],
    )
    def test_cache_different_databases(self, *args):
        """Each database gets its own cache; results are not shared."""
        linter = MigrationLinter(self.test_project_path, database="mysql")
        linter.old_cache.clear()
        linter.old_cache.save()
        linter = MigrationLinter(self.test_project_path, database="sqlite")
        linter.old_cache.clear()
        linter.old_cache.save()
        with mock.patch(
            "django_migration_linter.migration_linter.analyse_sql_statements",
            wraps=analyse_sql_statements,
        ) as analyse_sql_statements_mock:
            linter.lint_all_migrations()
            self.assertEqual(2, analyse_sql_statements_mock.call_count)
        cache = linter.new_cache
        cache.load()
        self.assertEqual("OK", cache["4a3770a405738d457e2d23e17fb1f3aa"]["result"])
        self.assertEqual("ERR", cache["19fd3ea688fc05e2cc2a6e67c0b7aa17"]["result"])
        self._assert_not_null_error(cache)
        # Start the Linter again but with a different database: the
        # sqlite cache must not be reused, so both migrations are
        # analysed again.
        linter = MigrationLinter(self.test_project_path, database="mysql")
        with mock.patch(
            "django_migration_linter.migration_linter.analyse_sql_statements",
            wraps=analyse_sql_statements,
        ) as analyse_sql_statements_mock:
            linter.lint_all_migrations()
            self.assertEqual(2, analyse_sql_statements_mock.call_count)
        cache = linter.new_cache
        cache.load()
        self.assertEqual("OK", cache["4a3770a405738d457e2d23e17fb1f3aa"]["result"])
        self.assertEqual("ERR", cache["19fd3ea688fc05e2cc2a6e67c0b7aa17"]["result"])
        self._assert_not_null_error(cache)
        self.assertTrue(linter.has_errors)

    @mock.patch(
        "django_migration_linter.MigrationLinter._gather_all_migrations",
        return_value=[
            Migration("0001_initial", "app_ignore_migration"),
            OperationsIgnoreMigration("0002_ignore_migration", "app_ignore_migration"),
        ],
    )
    def test_cache_ignored(self, *args):
        """Ignored migrations are neither analysed nor cached."""
        linter = MigrationLinter(self.test_project_path, ignore_name_contains="0001")
        linter.old_cache.clear()
        linter.old_cache.save()
        with mock.patch(
            "django_migration_linter.migration_linter.analyse_sql_statements",
            wraps=analyse_sql_statements,
        ) as analyse_sql_statements_mock:
            linter.lint_all_migrations()
            analyse_sql_statements_mock.assert_not_called()
        cache = linter.new_cache
        cache.load()
        self.assertFalse(cache)

    @mock.patch(
        "django_migration_linter.MigrationLinter._gather_all_migrations",
        return_value=[
            Migration("0002_add_new_not_null_field", "app_add_not_null_column")
        ],
    )
    def test_cache_modified(self, *args):
        """Changing a migration file's content invalidates its entry."""
        linter = MigrationLinter(self.test_project_path)
        linter.old_cache.clear()
        linter.old_cache.save()
        with mock.patch(
            "django_migration_linter.migration_linter.analyse_sql_statements",
            wraps=analyse_sql_statements,
        ) as analyse_sql_statements_mock:
            linter.lint_all_migrations()
            self.assertEqual(1, analyse_sql_statements_mock.call_count)
        cache = linter.new_cache
        cache.load()
        self.assertEqual("ERR", cache["19fd3ea688fc05e2cc2a6e67c0b7aa17"]["result"])
        # Get the content of the migration file and mock the open call to append
        # some content to change the hash
        migration_path = get_migration_abspath(
            "app_add_not_null_column", "0002_add_new_not_null_field"
        )
        with open(migration_path, "rb") as f:
            file_content = f.read()
        file_content += b"# test comment"
        linter = MigrationLinter(self.test_project_path)
        with mock.patch(
            "django_migration_linter.migration_linter.open",
            mock.mock_open(read_data=file_content),
        ):
            with mock.patch(
                "django_migration_linter.migration_linter.analyse_sql_statements",
                wraps=analyse_sql_statements,
            ) as analyse_sql_statements_mock:
                linter.lint_all_migrations()
                self.assertEqual(1, analyse_sql_statements_mock.call_count)
        cache = linter.new_cache
        cache.load()
        # The old hash is gone; the new content hashes to a new key.
        self.assertNotIn("19fd3ea688fc05e2cc2a6e67c0b7aa17", cache)
        self.assertEqual(1, len(cache))
        self.assertEqual("ERR", cache["a25768641a0ad526fad199f97c303784"]["result"])

    @mock.patch(
        "django_migration_linter.MigrationLinter._gather_all_migrations",
        return_value=[
            Migration("0001_create_table", "app_add_not_null_column"),
            Migration("0002_add_new_not_null_field", "app_add_not_null_column"),
        ],
    )
    def test_ignore_cached_migration(self, *args):
        """An ignored migration's cached error entry is dropped from the
        new cache and no longer counts as an error."""
        linter = MigrationLinter(self.test_project_path)
        linter.old_cache.clear()
        linter.old_cache.save()
        with mock.patch(
            "django_migration_linter.migration_linter.analyse_sql_statements",
            wraps=analyse_sql_statements,
        ) as analyse_sql_statements_mock:
            linter.lint_all_migrations()
            self.assertEqual(2, analyse_sql_statements_mock.call_count)
        cache = linter.new_cache
        cache.load()
        self.assertEqual("OK", cache["4a3770a405738d457e2d23e17fb1f3aa"]["result"])
        self.assertEqual("ERR", cache["19fd3ea688fc05e2cc2a6e67c0b7aa17"]["result"])
        self._assert_not_null_error(cache)
        # Start the Linter again -> should use cache now but ignore the erroneous
        linter = MigrationLinter(
            self.test_project_path, ignore_name_contains="0002_add_new_not_null_field"
        )
        with mock.patch(
            "django_migration_linter.migration_linter.analyse_sql_statements",
            wraps=analyse_sql_statements,
        ) as analyse_sql_statements_mock:
            linter.lint_all_migrations()
            analyse_sql_statements_mock.assert_not_called()
        self.assertFalse(linter.has_errors)
        cache = linter.new_cache
        cache.load()
        self.assertEqual(1, len(cache))
        self.assertEqual("OK", cache["4a3770a405738d457e2d23e17fb1f3aa"]["result"])
| 35.954861
| 90
| 0.640367
| 1,073
| 10,355
| 5.862069
| 0.168686
| 0.058824
| 0.117647
| 0.06868
| 0.77186
| 0.768839
| 0.754372
| 0.724006
| 0.691892
| 0.67345
| 0
| 0.048268
| 0.269725
| 10,355
| 287
| 91
| 36.080139
| 0.783523
| 0.082183
| 0
| 0.702222
| 0
| 0
| 0.242201
| 0.189819
| 0
| 0
| 0
| 0
| 0.137778
| 1
| 0.026667
| false
| 0
| 0.035556
| 0
| 0.075556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fe475d7e34dd95ef70993a864311f5c95753b6e9
| 89
|
py
|
Python
|
journal_project/note/admin.py
|
joseph-zabaleta/journal-api
|
575fcf8232be2fe4c4d73c8748e682d398a44d60
|
[
"MIT"
] | null | null | null |
journal_project/note/admin.py
|
joseph-zabaleta/journal-api
|
575fcf8232be2fe4c4d73c8748e682d398a44d60
|
[
"MIT"
] | 5
|
2021-03-30T13:58:54.000Z
|
2021-09-22T19:24:11.000Z
|
journal_project/note/admin.py
|
joseph-zabaleta/journal-api
|
575fcf8232be2fe4c4d73c8748e682d398a44d60
|
[
"MIT"
] | null | null | null |
from django.contrib import admin

from .models import NoteModel

# Bug fix: the original called `admin.register(NoteModel)`, which only
# returns a decorator (meant for `@admin.register(NoteModel)` above a
# ModelAdmin class) and never registers anything.  Registering with the
# default site makes NoteModel actually appear in the Django admin.
admin.site.register(NoteModel)
| 22.25
| 32
| 0.842697
| 12
| 89
| 6.25
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101124
| 89
| 4
| 33
| 22.25
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fe5096141e0418cc7b77abc3ff902d009f8bc9f3
| 325
|
py
|
Python
|
spacetimeformer/__init__.py
|
bernhein/spacetimeformer
|
de252b68085943d979606fe69e177ac2a14586e7
|
[
"MIT"
] | null | null | null |
spacetimeformer/__init__.py
|
bernhein/spacetimeformer
|
de252b68085943d979606fe69e177ac2a14586e7
|
[
"MIT"
] | null | null | null |
spacetimeformer/__init__.py
|
bernhein/spacetimeformer
|
de252b68085943d979606fe69e177ac2a14586e7
|
[
"MIT"
] | null | null | null |
from . import data
from .time2vec import Time2Vec
from .predictor import Predictor
from . import lr_scheduler
from . import callbacks
from . import eval_stats
from . import plot
# from . import lstnet_model
from . import lstm_model
# from . import mtgnn_model
from . import spacetimeformer_model
# from . import linear_model
| 25
| 35
| 0.796923
| 45
| 325
| 5.6
| 0.377778
| 0.396825
| 0.238095
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007299
| 0.156923
| 325
| 12
| 36
| 27.083333
| 0.912409
| 0.243077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fe53c88006d44aa53ec7f378622554fb0c72fd46
| 164
|
py
|
Python
|
src/web/celery/worker.py
|
uncharted-causemos/wm-curation-recommendation
|
30c53a443a5744bd37ed11f6c647380d91540a61
|
[
"Apache-2.0"
] | null | null | null |
src/web/celery/worker.py
|
uncharted-causemos/wm-curation-recommendation
|
30c53a443a5744bd37ed11f6c647380d91540a61
|
[
"Apache-2.0"
] | null | null | null |
src/web/celery/worker.py
|
uncharted-causemos/wm-curation-recommendation
|
30c53a443a5744bd37ed11f6c647380d91540a61
|
[
"Apache-2.0"
] | null | null | null |
from celery import Celery
from web.application import create_application
from web.celery.config import configure
# Module-level Celery app used as the worker entry point (e.g.
# `celery -A web.celery.worker worker`): build the web application and
# wrap/configure it for Celery.  NOTE(review): `configure` is assumed to
# return a Celery instance, per the annotation — confirm in
# web.celery.config.
celery: Celery = configure(create_application())
| 23.428571
| 48
| 0.835366
| 21
| 164
| 6.428571
| 0.380952
| 0.103704
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109756
| 164
| 6
| 49
| 27.333333
| 0.924658
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fe7861c1b7b27c182e0905336360fb22e90d0d21
| 183
|
py
|
Python
|
coders/curso_regex/bordas/bordasPalavras.py
|
flaviogf/Cursos
|
2b120dbcd24a907121f58482fdcdfa01b164872c
|
[
"MIT"
] | 2
|
2021-02-20T23:50:07.000Z
|
2021-08-15T03:04:35.000Z
|
coders/curso_regex/bordas/bordasPalavras.py
|
flaviogf/Cursos
|
2b120dbcd24a907121f58482fdcdfa01b164872c
|
[
"MIT"
] | 18
|
2019-08-07T02:33:00.000Z
|
2021-03-18T22:52:38.000Z
|
coders/curso_regex/bordas/bordasPalavras.py
|
flaviogf/Cursos
|
2b120dbcd24a907121f58482fdcdfa01b164872c
|
[
"MIT"
] | 2
|
2020-09-28T13:00:09.000Z
|
2021-12-30T12:21:08.000Z
|
import re

# Demonstrate regex word boundaries (\b) against overlapping words:
# "dia" is a prefix of "diafragma" and a suffix of "media".
texto = 'dia diafragma media'

# \bdia   -> word starts with "dia": dia, diafragma
# dia\b   -> word ends with "dia":   dia, media
# \bdia\b -> whole word "dia":       dia
for padrao in (r'\bdia', r'dia\b', r'\bdia\b'):
    print(re.findall(padrao, texto))
| 22.875
| 50
| 0.688525
| 31
| 183
| 4.064516
| 0.354839
| 0.253968
| 0.333333
| 0.357143
| 0.380952
| 0.380952
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125683
| 183
| 7
| 51
| 26.142857
| 0.7875
| 0.147541
| 0
| 0
| 0
| 0
| 0.236842
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0.6
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
fe7ba16ba07cb26f02e9fc778138f365105b5caf
| 214
|
py
|
Python
|
multauth/api/services/__init__.py
|
andrenerd/django-multiform-authentication
|
4a8b94ebd660cc7afc7dcdedcc12344ef85e6615
|
[
"MIT"
] | 7
|
2020-08-28T16:17:02.000Z
|
2021-11-11T18:01:20.000Z
|
multauth/api/services/__init__.py
|
andrenerd/django-multiform-authentication
|
4a8b94ebd660cc7afc7dcdedcc12344ef85e6615
|
[
"MIT"
] | null | null | null |
multauth/api/services/__init__.py
|
andrenerd/django-multiform-authentication
|
4a8b94ebd660cc7afc7dcdedcc12344ef85e6615
|
[
"MIT"
] | 2
|
2021-01-06T04:11:28.000Z
|
2021-05-19T14:43:52.000Z
|
# obsoleted # from ..authentication import TokenInactiveAuthentication
from ..decorators import swagger_auto_schema
from ..auth import serializers as auth_serializers
from ..me import serializers as me_serializers
| 42.8
| 70
| 0.845794
| 25
| 214
| 7.08
| 0.52
| 0.19209
| 0.214689
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107477
| 214
| 4
| 71
| 53.5
| 0.926702
| 0.313084
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
fe96601edb9e265983da76533d054bea14d12364
| 40
|
py
|
Python
|
run.py
|
rupav/tic-tac-toe
|
3c3d17c00d072ff8b8a3dae0948f559289c491fd
|
[
"MIT"
] | 1
|
2015-01-21T00:15:49.000Z
|
2015-01-21T00:15:49.000Z
|
runserver.py
|
oxford-space-apps/open-data-api
|
b6e380186c5bcee58a63a5c68b980d8bbbc1a70d
|
[
"MIT"
] | 2
|
2020-03-18T18:28:50.000Z
|
2020-03-25T07:39:23.000Z
|
runserver.py
|
oxford-space-apps/open-data-api
|
b6e380186c5bcee58a63a5c68b980d8bbbc1a70d
|
[
"MIT"
] | null | null | null |
from api import app

if __name__ == "__main__":
    # Guard added: the original started the server as an import side
    # effect, so merely importing this module would block on app.run().
    # debug=True enables the interactive debugger/reloader — development
    # use only, never production.
    app.run(debug=True)
| 13.333333
| 19
| 0.775
| 8
| 40
| 3.875
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 40
| 2
| 20
| 20
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
228f656b2332cc9e3d6e8aa706dcc043051bd8f4
| 53
|
py
|
Python
|
enthought/traits/ui/toolkit_traits.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 3
|
2016-12-09T06:05:18.000Z
|
2018-03-01T13:00:29.000Z
|
enthought/traits/ui/toolkit_traits.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 1
|
2020-12-02T00:51:32.000Z
|
2020-12-02T08:48:55.000Z
|
enthought/traits/ui/toolkit_traits.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | null | null | null |
# proxy module
from traitsui.toolkit_traits import *
| 17.666667
| 37
| 0.811321
| 7
| 53
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132075
| 53
| 2
| 38
| 26.5
| 0.913043
| 0.226415
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fe015454155ef3d35965e414f3342535fc778429
| 185
|
py
|
Python
|
slixmppd.py
|
hwipl/nuqql-slixmppd
|
f692a3af3d1d5bd5d2ee0ed6c9b309a680c41e39
|
[
"MIT"
] | 1
|
2019-09-23T11:59:38.000Z
|
2019-09-23T11:59:38.000Z
|
slixmppd.py
|
hwipl/nuqql-slixmppd
|
f692a3af3d1d5bd5d2ee0ed6c9b309a680c41e39
|
[
"MIT"
] | null | null | null |
slixmppd.py
|
hwipl/nuqql-slixmppd
|
f692a3af3d1d5bd5d2ee0ed6c9b309a680c41e39
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3

"""
Helper script for starting slixmppd
"""

import sys

import nuqql_slixmppd.main
import nuqql_slixmppd

if __name__ == "__main__":
    # Guard added so importing this helper does not start the daemon;
    # propagate the daemon's return value as the process exit status.
    sys.exit(nuqql_slixmppd.main.main())
| 13.214286
| 36
| 0.767568
| 26
| 185
| 5.346154
| 0.576923
| 0.280576
| 0.273381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006173
| 0.124324
| 185
| 13
| 37
| 14.230769
| 0.851852
| 0.389189
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
a3fa8b574c3e7a0c8d8147d0ca74ea13a7d54721
| 3,378
|
py
|
Python
|
auth0/v3/test/management/test_auth0.py
|
matthewarmand/auth0-python
|
1571b18535c46502e39e70f988f96624f2dfef21
|
[
"MIT"
] | null | null | null |
auth0/v3/test/management/test_auth0.py
|
matthewarmand/auth0-python
|
1571b18535c46502e39e70f988f96624f2dfef21
|
[
"MIT"
] | null | null | null |
auth0/v3/test/management/test_auth0.py
|
matthewarmand/auth0-python
|
1571b18535c46502e39e70f988f96624f2dfef21
|
[
"MIT"
] | null | null | null |
import unittest
from ...management.auth0 import Auth0
from ...management.blacklists import Blacklists
from ...management.clients import Clients
from ...management.client_grants import ClientGrants
from ...management.connections import Connections
from ...management.custom_domains import CustomDomains
from ...management.device_credentials import DeviceCredentials
from ...management.emails import Emails
from ...management.email_templates import EmailTemplates
from ...management.grants import Grants
from ...management.guardian import Guardian
from ...management.hooks import Hooks
from ...management.jobs import Jobs
from ...management.logs import Logs
from ...management.resource_servers import ResourceServers
from ...management.roles import Roles
from ...management.rules import Rules
from ...management.rules_configs import RulesConfigs
from ...management.stats import Stats
from ...management.tenants import Tenants
from ...management.tickets import Tickets
from ...management.user_blocks import UserBlocks
from ...management.users import Users
from ...management.users_by_email import UsersByEmail
class TestAuth0(unittest.TestCase):
    """Verify that an Auth0 client exposes one manager instance per API area."""
    def setUp(self):
        """Build a client against dummy credentials."""
        self.domain = 'user.some.domain'
        self.token = 'a-token'
        self.a0 = Auth0(self.domain, self.token)
    def _check_manager(self, attribute, manager_cls):
        # Shared assertion: the named client attribute is an instance of
        # the expected manager class.
        self.assertIsInstance(getattr(self.a0, attribute), manager_cls)
    def test_blacklists(self):
        self._check_manager('blacklists', Blacklists)
    def test_clients(self):
        self._check_manager('clients', Clients)
    def test_client_grants(self):
        self._check_manager('client_grants', ClientGrants)
    def test_custom_domains(self):
        self._check_manager('custom_domains', CustomDomains)
    def test_connections(self):
        self._check_manager('connections', Connections)
    def test_device_credentials(self):
        self._check_manager('device_credentials', DeviceCredentials)
    def test_emails(self):
        self._check_manager('emails', Emails)
    def test_email_templates(self):
        self._check_manager('email_templates', EmailTemplates)
    def test_grants(self):
        self._check_manager('grants', Grants)
    def test_guardian(self):
        self._check_manager('guardian', Guardian)
    def test_hooks(self):
        self._check_manager('hooks', Hooks)
    def test_jobs(self):
        self._check_manager('jobs', Jobs)
    def test_logs(self):
        self._check_manager('logs', Logs)
    def test_resource_servers(self):
        self._check_manager('resource_servers', ResourceServers)
    def test_roles(self):
        self._check_manager('roles', Roles)
    def test_rules(self):
        self._check_manager('rules', Rules)
    def test_rules_configs(self):
        self._check_manager('rules_configs', RulesConfigs)
    def test_stats(self):
        self._check_manager('stats', Stats)
    def test_tenants(self):
        self._check_manager('tenants', Tenants)
    def test_tickets(self):
        self._check_manager('tickets', Tickets)
    def test_user_blocks(self):
        self._check_manager('user_blocks', UserBlocks)
    def test_users(self):
        self._check_manager('users', Users)
    def test_users_by_email(self):
        self._check_manager('users_by_email', UsersByEmail)
| 32.796117
| 76
| 0.744819
| 402
| 3,378
| 6.134328
| 0.134328
| 0.136253
| 0.223844
| 0.261152
| 0.292782
| 0.085969
| 0
| 0
| 0
| 0
| 0
| 0.009845
| 0.158082
| 3,378
| 102
| 77
| 33.117647
| 0.857243
| 0
| 0
| 0
| 0
| 0
| 0.006809
| 0
| 0
| 0
| 0
| 0
| 0.302632
| 1
| 0.315789
| false
| 0
| 0.328947
| 0
| 0.657895
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
432e128ef1abd275775dc423ea72a0c3f34038f4
| 34,595
|
py
|
Python
|
tests/test_challenges.py
|
cryptobali/evalai-cli
|
a658e23a6c4c65ec10b017087de93d09bab08ab4
|
[
"BSD-3-Clause"
] | 1
|
2021-06-14T12:07:20.000Z
|
2021-06-14T12:07:20.000Z
|
tests/test_challenges.py
|
cryptobali/evalai-cli
|
a658e23a6c4c65ec10b017087de93d09bab08ab4
|
[
"BSD-3-Clause"
] | 729
|
2020-01-21T20:33:00.000Z
|
2021-08-02T23:22:14.000Z
|
tests/test_challenges.py
|
cryptobali/evalai-cli
|
a658e23a6c4c65ec10b017087de93d09bab08ab4
|
[
"BSD-3-Clause"
] | null | null | null |
import json
import responses
from beautifultable import BeautifulTable
from click.testing import CliRunner
from datetime import datetime
from dateutil import tz
from evalai.challenges import challenge, challenges
from evalai.utils.urls import URLS
from evalai.utils.config import API_HOST_URL
from evalai.utils.common import (
convert_UTC_date_to_local,
validate_date_format,
clean_data,
)
from tests.data import challenge_response, submission_response
from .base import BaseTestClass
class TestDisplayChallenges(BaseTestClass):
    """Tests for the ``challenges`` listing command (all/past/ongoing/future)."""
    def setup(self):
        """Register mocked list endpoints and precompute the expected table.

        The same fixture payload is served for every list variant, so all
        four commands are expected to render the identical table string.
        """
        challenge_data = json.loads(challenge_response.challenges)
        url = "{}{}"
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.challenge_list.value),
            json=challenge_data,
            status=200,
        )
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.past_challenge_list.value),
            json=challenge_data,
            status=200,
        )
        # challenge_list is registered twice: once per invocation that
        # hits it ("all" and "ongoing").
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.challenge_list.value),
            json=challenge_data,
            status=200,
        )
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.future_challenge_list.value),
            json=challenge_data,
            status=200,
        )
        challenges_json = challenge_data["results"]
        self.output = ""
        table = BeautifulTable(max_width=200)
        attributes = ["id", "title", "short_description"]
        columns_attributes = [
            "ID",
            "Title",
            "Short Description",
            "Creator",
            "Start Date",
            "End Date",
        ]
        table.column_headers = columns_attributes
        # Mirror the CLI's rendering exactly: reverse order, UTC dates
        # converted to local time.
        for challenge_data in reversed(challenges_json):
            values = list(map(lambda item: challenge_data[item], attributes))
            creator = challenge_data["creator"]["team_name"]
            start_date = convert_UTC_date_to_local(
                challenge_data["start_date"]
            )
            end_date = convert_UTC_date_to_local(challenge_data["end_date"])
            values.extend([creator, start_date, end_date])
            table.append_row(values)
        self.output = str(table)
    @responses.activate
    def test_display_all_challenge_lists(self):
        runner = CliRunner()
        result = runner.invoke(challenges)
        response = result.output.strip()
        assert response == self.output
    @responses.activate
    def test_display_past_challenge_lists(self):
        runner = CliRunner()
        result = runner.invoke(challenges, ["past"])
        response = result.output.strip()
        assert response == self.output
    @responses.activate
    def test_display_ongoing_challenge_lists(self):
        runner = CliRunner()
        result = runner.invoke(challenges, ["ongoing"])
        response = result.output.strip()
        assert response == self.output
    @responses.activate
    def test_display_future_challenge_lists(self):
        runner = CliRunner()
        result = runner.invoke(challenges, ["future"])
        response = result.output.strip()
        assert response == self.output
class TestDisplayChallengeDetails(BaseTestClass):
    """Tests for ``challenge <id>`` single-challenge detail rendering."""
    def setup(self):
        """Mock the detail endpoint for challenge id "1"."""
        self.challenge_data = json.loads(challenge_response.challenge_details)
        url = "{}{}"
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.challenge_details.value.format("1")),
            json=self.challenge_data,
            status=200,
        )
    @responses.activate
    def test_display_challenge_details(self):
        """Rebuild the expected detail table and compare to CLI output."""
        table = BeautifulTable(max_width=200)
        attributes = [
            "description",
            "submission_guidelines",
            "evaluation_details",
            "terms_and_conditions",
        ]
        column_attributes = [
            "Start Date",
            "End Date",
            "Description",
            "Submission Guidelines",
            "Evaluation Details",
            "Terms and Conditions",
        ]
        table.column_headers = column_attributes
        values = []
        # Only the date part is shown: localize, then split off the time.
        start_date = convert_UTC_date_to_local(
            self.challenge_data["start_date"]
        ).split(" ")[0]
        end_date = convert_UTC_date_to_local(
            self.challenge_data["end_date"]
        ).split(" ")[0]
        values.extend([start_date, end_date])
        values.extend(
            list(
                map(
                    lambda item: clean_data(self.challenge_data[item]),
                    attributes,
                )
            )
        )
        table.append_row(values)
        expected = str(table)
        runner = CliRunner()
        result = runner.invoke(challenge, ["1"])
        response = result.output.strip()
        assert response == expected
class TestOngoingChallengesConditions(BaseTestClass):
    """The "ongoing" filter must hide unpublished/unapproved/ended challenges.

    Each test mutates the shared fixture so every challenge fails one
    filter condition, then expects the "no challenges" message.
    """
    @responses.activate
    def test_display_ongoing_challenges_when_challenge_is_not_publicly_available(
        self
    ):
        # Mark every challenge unpublished.
        challenge_data = json.loads(challenge_response.challenges)
        for i in range(len(challenge_data["results"])):
            challenge_data["results"][i]["published"] = False
        url = "{}{}"
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.challenge_list.value),
            json=challenge_data,
            status=200,
        )
        runner = CliRunner()
        expected = "Sorry, no challenges found."
        result = runner.invoke(challenges, ["ongoing"])
        assert result.output.strip() == expected
    @responses.activate
    def test_display_ongoing_challenges_when_challenge_is_not_approved_by_admin(
        self
    ):
        # Mark every challenge as not admin-approved.
        challenge_data = json.loads(challenge_response.challenges)
        for i in range(len(challenge_data["results"])):
            challenge_data["results"][i]["approved_by_admin"] = False
        url = "{}{}"
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.challenge_list.value),
            json=challenge_data,
            status=200,
        )
        runner = CliRunner()
        expected = "Sorry, no challenges found."
        result = runner.invoke(challenges, ["ongoing"])
        assert result.output.strip() == expected
    @responses.activate
    def test_display_ongoing_challenges_when_challenge_is_not_active(self):
        # Push every end_date into the past so nothing is "ongoing".
        challenge_data = json.loads(challenge_response.challenges)
        for i in range(len(challenge_data["results"])):
            challenge_data["results"][i]["end_date"] = "2017-06-18T20:00:00Z"
        url = "{}{}"
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.challenge_list.value),
            json=challenge_data,
            status=200,
        )
        runner = CliRunner()
        expected = "Sorry, no challenges found."
        result = runner.invoke(challenges, ["ongoing"])
        assert result.output.strip() == expected
class TestDisplayChallengesWithNoChallengeData(BaseTestClass):
    """Listing commands when every endpoint returns an empty result set."""
    def setup(self):
        """Mock all list/team/leaderboard endpoints with empty payloads."""
        participant_team_data = json.loads(
            challenge_response.challenge_participant_teams
        )
        host_team_data = json.loads(challenge_response.challenge_host_teams)
        empty_leaderboard = json.loads(challenge_response.empty_leaderboard)
        url = "{}{}"
        # NOTE(review): this local string shadows the imported `challenges`
        # CLI command inside setup(); the tests below still see the import.
        challenges = (
            '{"count": 2, "next": null, "previous": null,"results": []}'
        )
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.challenge_list.value),
            json=json.loads(challenges),
            status=200,
        )
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.participant_teams.value),
            json=participant_team_data,
            status=200,
        )
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.host_teams.value),
            json=host_team_data,
            status=200,
        )
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.participant_challenges.value).format(
                "3"
            ),
            json=json.loads(challenges),
            status=200,
        )
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.host_challenges.value).format("2"),
            json=json.loads(challenges),
            status=200,
        )
        responses.add(
            responses.GET,
            url.format(
                API_HOST_URL, URLS.challenge_phase_split_detail.value
            ).format("1"),
            json=[],
            status=200,
        )
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.leaderboard.value).format("1"),
            json=empty_leaderboard,
            status=200,
        )
        self.output = "Sorry, no challenges found.\n"
    @responses.activate
    def test_display_all_challenge_lists_with_no_challenge_data(self):
        runner = CliRunner()
        result = runner.invoke(challenges)
        response = result.output
        assert response == self.output
    @responses.activate
    def test_display_host_challenge_list_with_no_challenge_data(self):
        runner = CliRunner()
        expected = "\nHosted Challenges\n\n"
        self.output = "{}{}".format(expected, self.output)
        result = runner.invoke(challenges, ["--host"])
        response = result.output
        assert response == self.output
    @responses.activate
    def test_display_participant_challenge_lists_with_no_challenge_data(self):
        runner = CliRunner()
        result = runner.invoke(challenges, ["--participant"])
        response = result.output
        assert response == self.output
    @responses.activate
    def test_display_participant_and_host_challenge_lists_with_no_challenge_data(
        self
    ):
        runner = CliRunner()
        host_string = "\nHosted Challenges\n\n"
        # Both sections print the same "no challenges" message.
        self.output = "{}{}{}".format(host_string, self.output, self.output)
        result = runner.invoke(challenges, ["--participant", "--host"])
        response = result.output
        assert response == self.output
    @responses.activate
    def test_display_challenge_phase_splits_with_no_challenge_data(self):
        runner = CliRunner()
        result = runner.invoke(challenge, ["1", "phase", "2", "splits"])
        response = result.output
        assert response == "Sorry, no Challenge Phase Splits found.\n"
    @responses.activate
    def test_display_leaderboard_with_no_challenge_data(self):
        runner = CliRunner()
        result = runner.invoke(challenge, ["2", "leaderboard", "1"])
        response = result.output.rstrip()
        assert response == "Sorry, no Leaderboard results found."
class TestParticipantChallengesConditions(BaseTestClass):
    """``challenges --participant`` filters: unpublished, unapproved, ended.

    Same structure as TestOngoingChallengesConditions, but through the
    participant-team endpoints.
    """
    @responses.activate
    def test_display_participant_challenges_when_challenge_is_not_publicly_available(
        self
    ):
        # Mark every challenge unpublished.
        challenge_data = json.loads(challenge_response.challenges)
        participant_team_data = json.loads(
            challenge_response.challenge_participant_teams
        )
        for i in range(len(challenge_data["results"])):
            challenge_data["results"][i]["published"] = False
        url = "{}{}"
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.participant_teams.value),
            json=participant_team_data,
            status=200,
        )
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.participant_challenges.value).format(
                "3"
            ),
            json=challenge_data,
            status=200,
        )
        runner = CliRunner()
        expected = "Sorry, no challenges found."
        result = runner.invoke(challenges, ["--participant"])
        assert result.output.strip() == expected
    @responses.activate
    def test_display_participant_challenges_when_challenge_is_not_approved_by_admin(
        self
    ):
        # Mark every challenge as not admin-approved.
        challenge_data = json.loads(challenge_response.challenges)
        participant_team_data = json.loads(
            challenge_response.challenge_participant_teams
        )
        for i in range(len(challenge_data["results"])):
            challenge_data["results"][i]["approved_by_admin"] = False
        url = "{}{}"
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.participant_teams.value),
            json=participant_team_data,
            status=200,
        )
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.participant_challenges.value).format(
                "3"
            ),
            json=challenge_data,
            status=200,
        )
        runner = CliRunner()
        expected = "Sorry, no challenges found."
        result = runner.invoke(challenges, ["--participant"])
        assert result.output.strip() == expected
    @responses.activate
    def test_display_participant_challenges_when_challenge_is_not_active(self):
        # Push every end_date into the past.
        challenge_data = json.loads(challenge_response.challenges)
        participant_team_data = json.loads(
            challenge_response.challenge_participant_teams
        )
        for i in range(len(challenge_data["results"])):
            challenge_data["results"][i]["end_date"] = "2017-06-18T20:00:00Z"
        url = "{}{}"
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.participant_teams.value),
            json=participant_team_data,
            status=200,
        )
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.participant_challenges.value).format(
                "3"
            ),
            json=challenge_data,
            status=200,
        )
        runner = CliRunner()
        expected = "Sorry, no challenges found."
        result = runner.invoke(challenges, ["--participant"])
        assert result.output.strip() == expected
class TestParticipantOrHostTeamChallenges(BaseTestClass):
    """``challenges --host`` / ``--participant`` with populated results."""
    def setup(self):
        """Mock team + challenge endpoints and build the expected table."""
        challenge_data = json.loads(challenge_response.challenges)
        host_team_data = json.loads(challenge_response.challenge_host_teams)
        participant_team_data = json.loads(
            challenge_response.challenge_participant_teams
        )
        url = "{}{}"
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.participant_teams.value),
            json=participant_team_data,
            status=200,
        )
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.host_teams.value),
            json=host_team_data,
            status=200,
        )
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.participant_challenges.value).format(
                "3"
            ),
            json=challenge_data,
            status=200,
        )
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.host_challenges.value).format("2"),
            json=challenge_data,
            status=200,
        )
        challenges_json = challenge_data["results"]
        table = BeautifulTable(max_width=200)
        attributes = ["id", "title", "short_description"]
        columns_attributes = [
            "ID",
            "Title",
            "Short Description",
            "Creator",
            "Start Date",
            "End Date",
        ]
        table.column_headers = columns_attributes
        # Mirror the CLI's rendering: reverse order, localized dates.
        for challenge_data in reversed(challenges_json):
            values = list(map(lambda item: challenge_data[item], attributes))
            creator = challenge_data["creator"]["team_name"]
            start_date = convert_UTC_date_to_local(
                challenge_data["start_date"]
            )
            end_date = convert_UTC_date_to_local(challenge_data["end_date"])
            values.extend([creator, start_date, end_date])
            table.append_row(values)
        self.output = str(table)
    @responses.activate
    def test_display_host_challenge_list(self):
        runner = CliRunner()
        expected = "\nHosted Challenges\n\n"
        self.output = "{}{}".format(expected, self.output)
        result = runner.invoke(challenges, ["--host"])
        response = result.output.rstrip()
        assert response == self.output
    @responses.activate
    def test_display_participant_challenge_lists(self):
        runner = CliRunner()
        expected = "\nParticipated Challenges\n\n"
        self.output = "{}{}".format(expected, self.output)
        result = runner.invoke(challenges, ["--participant"])
        response = result.output.rstrip()
        assert response == self.output
    @responses.activate
    def test_display_participant_and_host_challenge_lists(self):
        runner = CliRunner()
        participant_string = "\nParticipated Challenges\n\n"
        host_string = "\nHosted Challenges\n\n"
        # Hosted section prints first, then the participated section.
        self.output = "{}{}\n{}{}".format(
            host_string, self.output, participant_string, self.output
        )
        result = runner.invoke(challenges, ["--participant", "--host"])
        response = result.output.rstrip()
        assert response == self.output
class TestDisplayChallengePhases(BaseTestClass):
    """Tests for phase listing and single-phase detail output."""
    def setup(self):
        """Mock phase list (challenge 10) and phase detail (10/20)."""
        challenge_phase_list_json = json.loads(
            challenge_response.challenge_phase_list
        )
        challenge_phase_details_json = json.loads(
            challenge_response.challenge_phase_details
        )
        url = "{}{}"
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.challenge_phase_list.value).format(
                "10"
            ),
            json=challenge_phase_list_json,
            status=200,
        )
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.challenge_phase_detail.value).format(
                "10", "20"
            ),
            json=challenge_phase_details_json,
            status=200,
        )
        self.phases = challenge_phase_list_json["results"]
        self.phase = challenge_phase_details_json
    @responses.activate
    def test_display_challenge_phase_list(self):
        table = BeautifulTable(max_width=150)
        attributes = ["id", "name", "challenge"]
        columns_attributes = [
            "Phase ID",
            "Phase Name",
            "Challenge ID",
            "Description",
        ]
        table.column_headers = columns_attributes
        for phase in self.phases:
            values = list(map(lambda item: phase[item], attributes))
            description = clean_data(phase["description"])
            values.append(description)
            table.append_row(values)
        output = str(table)
        runner = CliRunner()
        result = runner.invoke(challenge, ["10", "phases"])
        response = result.output.rstrip()
        assert response == output
    @responses.activate
    def test_display_challenge_phase_detail(self):
        """Rebuild the CLI's free-form phase detail text piece by piece.

        The concatenation order below must match the CLI output exactly,
        including the leaderboard_public/codename ordering in the final
        format call.
        """
        phase = self.phase
        phase_title = "\n{}".format(phase["name"])
        challenge_id = "Challenge ID: {}".format(str(phase["challenge"]))
        phase_id = "Phase ID: {}\n\n".format(str(phase["id"]))
        title = "{} {} {}".format(phase_title, challenge_id, phase_id)
        description = "{}\n".format(phase["description"])
        start_date = "Start Date : " + phase["start_date"].split("T")[0]
        start_date = "\n{}\n".format(start_date)
        end_date = "End Date : " + phase["end_date"].split("T")[0]
        end_date = "\n{}\n".format(end_date)
        max_submissions_per_day = "\nMaximum Submissions per day : {}\n".format(
            str(phase["max_submissions_per_day"])
        )
        max_submissions = "\nMaximum Submissions : {}\n".format(
            str(phase["max_submissions"])
        )
        codename = "\nCode Name : {}\n".format(phase["codename"])
        leaderboard_public = "\nLeaderboard Public : {}\n".format(
            phase["leaderboard_public"]
        )
        is_active = "\nActive : {}\n".format(phase["is_active"])
        is_public = "\nPublic : {}\n".format(phase["is_public"])
        phase = "{}{}{}{}{}{}{}{}{}{}\n".format(
            title,
            description,
            start_date,
            end_date,
            max_submissions_per_day,
            max_submissions,
            leaderboard_public,
            codename,
            is_active,
            is_public,
        )
        runner = CliRunner()
        result = runner.invoke(challenge, ["10", "phase", "20"])
        response = result.output
        assert response == phase
    @responses.activate
    def test_display_challenge_phase_detail_with_json_flag(self):
        expected = json.dumps(self.phase, indent=4, sort_keys=True)
        runner = CliRunner()
        result = runner.invoke(challenge, ["10", "phase", "20", "--json"])
        response = result.output.strip()
        assert response == expected
class TestDisplaySubmission(BaseTestClass):
    """Tests for ``challenge <id> phase <id> submissions`` with date filters."""
    def setup(self):
        """Mock the my-submissions endpoint for challenge 3 / phase 7."""
        json_data = json.loads(submission_response.submission)
        url = "{}{}"
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.my_submissions.value).format(
                "3", "7"
            ),
            json=json_data,
            status=200,
        )
        self.submissions = json_data["results"]
    @responses.activate
    def test_display_my_submission_details(self):
        table = BeautifulTable(max_width=100)
        attributes = [
            "id",
            "participant_team_name",
            "execution_time",
            "status",
        ]
        columns_attributes = [
            "ID",
            "Participant Team",
            "Execution Time(sec)",
            "Status",
            "Submitted At",
            "Method Name",
        ]
        table.column_headers = columns_attributes
        for submission in self.submissions:
            # Format date
            date = datetime.strptime(
                submission["submitted_at"], "%Y-%m-%dT%H:%M:%S.%fZ"
            )
            from_zone = tz.tzutc()
            to_zone = tz.tzlocal()
            # Convert to local timezone from UTC.
            date = date.replace(tzinfo=from_zone)
            converted_date = date.astimezone(to_zone)
            date = converted_date.strftime("%D %r")
            # Check for empty method name
            method_name = (
                submission["method_name"]
                if submission["method_name"]
                else "None"
            )
            values = list(map(lambda item: submission[item], attributes))
            values.append(date)
            values.append(method_name)
            table.append_row(values)
        output = str(table).rstrip()
        runner = CliRunner()
        result = runner.invoke(challenge, ["3", "phase", "7", "submissions"])
        response = result.output.rstrip()
        assert response == output
    @responses.activate
    def test_display_my_submission_details_with_single_argument(self):
        # Missing phase id: click rejects "submissions" as the PHASE value.
        output = (
            "Usage: challenge phase [OPTIONS] PHASE COMMAND [ARGS]...\n"
            '\nError: Invalid value for "PHASE": submissions is not a valid integer\n'
        )
        runner = CliRunner()
        result = runner.invoke(challenge, ["2", "phase", "submissions"])
        response = result.output
        assert response == output
    @responses.activate
    def test_display_my_submission_details_with_start_date(self):
        table = BeautifulTable(max_width=100)
        attributes = [
            "id",
            "participant_team_name",
            "execution_time",
            "status",
        ]
        columns_attributes = [
            "ID",
            "Participant Team",
            "Execution Time(sec)",
            "Status",
            "Submitted At",
            "Method Name",
        ]
        table.column_headers = columns_attributes
        # Only submissions on/after -s 6/7/18 should be listed.
        start_date = datetime.strptime("6/7/18", "%m/%d/%y")
        end_date = datetime.max
        for submission in self.submissions:
            date = validate_date_format(submission["submitted_at"])
            if date >= start_date and date <= end_date:
                # Check for empty method name
                date = convert_UTC_date_to_local(submission["submitted_at"])
                method_name = (
                    submission["method_name"]
                    if submission["method_name"]
                    else "None"
                )
                values = list(map(lambda item: submission[item], attributes))
                values.append(date)
                values.append(method_name)
                table.append_row(values)
        output = str(table).rstrip()
        runner = CliRunner()
        result = runner.invoke(
            challenge, ["3", "phase", "7", "submissions", "-s", "6/7/18"]
        )
        response = result.output.rstrip()
        assert response == output
    @responses.activate
    def test_display_my_submission_details_with_end_date(self):
        table = BeautifulTable(max_width=100)
        attributes = [
            "id",
            "participant_team_name",
            "execution_time",
            "status",
        ]
        columns_attributes = [
            "ID",
            "Participant Team",
            "Execution Time(sec)",
            "Status",
            "Submitted At",
            "Method Name",
        ]
        table.column_headers = columns_attributes
        # Only submissions on/before -e 6/7/18 should be listed.
        start_date = datetime.min
        end_date = datetime.strptime("6/7/18", "%m/%d/%y")
        for submission in self.submissions:
            date = validate_date_format(submission["submitted_at"])
            if date >= start_date and date <= end_date:
                # Check for empty method name
                date = convert_UTC_date_to_local(submission["submitted_at"])
                method_name = (
                    submission["method_name"]
                    if submission["method_name"]
                    else "None"
                )
                values = list(map(lambda item: submission[item], attributes))
                values.append(date)
                values.append(method_name)
                table.append_row(values)
        output = str(table).rstrip()
        runner = CliRunner()
        result = runner.invoke(
            challenge, ["3", "phase", "7", "submissions", "-e", "6/7/18"]
        )
        response = result.output.rstrip()
        assert response == output
    @responses.activate
    def test_display_my_submission_details_with_end_date_and_start_date(self):
        table = BeautifulTable(max_width=100)
        attributes = [
            "id",
            "participant_team_name",
            "execution_time",
            "status",
        ]
        columns_attributes = [
            "ID",
            "Participant Team",
            "Execution Time(sec)",
            "Status",
            "Submitted At",
            "Method Name",
        ]
        table.column_headers = columns_attributes
        # Inclusive window: -s 6/5/18 through -e 6/9/18.
        start_date = datetime.strptime("6/5/18", "%m/%d/%y")
        end_date = datetime.strptime("6/9/18", "%m/%d/%y")
        for submission in self.submissions:
            date = validate_date_format(submission["submitted_at"])
            if date >= start_date and date <= end_date:
                # Check for empty method name
                date = convert_UTC_date_to_local(submission["submitted_at"])
                method_name = (
                    submission["method_name"]
                    if submission["method_name"]
                    else "None"
                )
                values = list(map(lambda item: submission[item], attributes))
                values.append(date)
                values.append(method_name)
                table.append_row(values)
        output = str(table).rstrip()
        runner = CliRunner()
        result = runner.invoke(
            challenge,
            ["3", "phase", "7", "submissions", "-s", "6/5/18", "-e", "6/9/18"],
        )
        response = result.output.rstrip()
        assert response == output
    @responses.activate
    def test_display_my_submission_details_with_end_date_and_start_date_without_submissions(
        self
    ):
        # A window that matches no fixture submissions.
        output = "Sorry, no submissions were made during this time period."
        runner = CliRunner()
        result = runner.invoke(
            challenge,
            [
                "3",
                "phase",
                "7",
                "submissions",
                "-s",
                "6/10/18",
                "-e",
                "6/15/18",
            ],
        )
        response = result.output.strip()
        assert response == output
class TestDisplayLeaderboard(BaseTestClass):
    """Tests for ``challenge <id> leaderboard <cps>`` rendering and errors."""
    def setup(self):
        """Mock the leaderboard endpoint for phase split "1"."""
        json_data = json.loads(challenge_response.leaderboard)
        url = "{}{}"
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.leaderboard.value).format("1"),
            json=json_data,
            status=200,
        )
        self.leaderboard = json_data["results"]
    @responses.activate
    def test_display_leaderboard(self):
        # Column labels come from the schema of the first leaderboard entry.
        attributes = self.leaderboard[0]["leaderboard__schema"]["labels"]
        table = BeautifulTable(max_width=150)
        attributes = (
            ["Rank", "Participant Team"] + attributes + ["Last Submitted"]
        )
        attributes = list(map(lambda item: str(item), attributes))
        table.column_headers = attributes
        for rank, result in enumerate(self.leaderboard, start=1):
            name = result["submission__participant_team__team_name"]
            scores = result["result"]
            last_submitted = convert_UTC_date_to_local(
                result["submission__submitted_at"]
            )
            value = [rank, name] + scores + [last_submitted]
            table.append_row(value)
        output = str(table).rstrip()
        runner = CliRunner()
        result = runner.invoke(challenge, ["2", "leaderboard", "1"])
        response = result.output.rstrip()
        assert response == output
    @responses.activate
    def test_test_display_leaderboard_with_string_argument(self):
        output = (
            "Usage: challenge leaderboard [OPTIONS] CPS\n"
            '\nError: Invalid value for "CPS": two is not a valid integer\n'
        )
        runner = CliRunner()
        result = runner.invoke(challenge, ["2", "leaderboard", "two"])
        response = result.output
        assert response == output
    @responses.activate
    def test_display_leaderboard_with_single_argument(self):
        output = (
            "Usage: challenge leaderboard [OPTIONS] CPS\n"
            '\nError: Missing argument "CPS".\n'
        )
        runner = CliRunner()
        result = runner.invoke(challenge, ["2", "leaderboard"])
        response = result.output
        assert response == output
class TestDisplayChallengePhaseSplit(BaseTestClass):
    """Tests for ``challenge <id> phase <id> splits`` and its CLI errors."""
    def setup(self):
        """Mock the phase-split detail endpoint for challenge "1"."""
        json_data = json.loads(challenge_response.challenge_phase_splits)
        url = "{}{}"
        responses.add(
            responses.GET,
            url.format(
                API_HOST_URL, URLS.challenge_phase_split_detail.value
            ).format("1"),
            json=json_data,
            status=200,
        )
        self.splits = json_data
    @responses.activate
    def test_display_challenge_phase_split(self):
        output = ""
        table = BeautifulTable(max_width=100)
        attributes = ["id", "dataset_split_name", "challenge_phase_name"]
        columns_attributes = [
            "Challenge Phase ID",
            "Dataset Split",
            "Challenge Phase Name",
        ]
        table.column_headers = columns_attributes
        for split in self.splits:
            # Only publicly visible splits (visibility == 3) are shown.
            if split["visibility"] == 3:
                values = list(map(lambda item: split[item], attributes))
                table.append_row(values)
        output = str(table)
        runner = CliRunner()
        result = runner.invoke(challenge, ["1", "phase", "2", "splits"])
        response = result.output.rstrip()
        assert response == output
    @responses.activate
    def test_display_challenge_phase_split_list_with_a_single_argument(self):
        output = (
            "Usage: challenge phase [OPTIONS] PHASE COMMAND [ARGS]...\n"
            '\nError: Missing argument "PHASE".\n'
        )
        runner = CliRunner()
        result = runner.invoke(challenge, ["2", "phase"])
        response = result.output
        assert response == output
    @responses.activate
    def test_display_my_submission_details_with_string_argument(self):
        output = (
            "Usage: challenge phase [OPTIONS] PHASE COMMAND [ARGS]...\n"
            '\nError: Invalid value for "PHASE": two is not a valid integer\n'
        )
        runner = CliRunner()
        result = runner.invoke(challenge, ["2", "phase", "two"])
        response = result.output
        assert response == output
class TestDisplaySubmissionWithoutSubmissionData(BaseTestClass):
    """Submission listing when the endpoint returns an empty result set."""
    def setup(self):
        """Mock my-submissions (challenge 3 / phase 7) with zero results."""
        data = '{"count": 4, "next": null, "previous": null, "results": []}'
        json_data = json.loads(data)
        url = "{}{}"
        responses.add(
            responses.GET,
            url.format(API_HOST_URL, URLS.my_submissions.value).format(
                "3", "7"
            ),
            json=json_data,
            status=200,
        )
    @responses.activate
    def test_display_my_submission_details_without_submissions(self):
        expected = "\nSorry, you have not made any submissions to this challenge phase."
        runner = CliRunner()
        result = runner.invoke(challenge, ["3", "phase", "7", "submissions"])
        response = result.output.rstrip()
        assert response == expected
    @responses.activate
    def test_display_challenge_phase_split_list_with_string_argument(self):
        output = (
            "Usage: challenge [OPTIONS] CHALLENGE COMMAND [ARGS]...\n"
            '\nError: Invalid value for "CHALLENGE": two is not a valid integer\n'
        )
        runner = CliRunner()
        result = runner.invoke(challenge, ["two", "participate", "3"])
        response = result.output
        assert response == output
| 32.853751
| 92
| 0.579477
| 3,419
| 34,595
| 5.643463
| 0.071073
| 0.037056
| 0.038352
| 0.046022
| 0.818139
| 0.796631
| 0.767608
| 0.736978
| 0.69334
| 0.657735
| 0
| 0.011024
| 0.313022
| 34,595
| 1,052
| 93
| 32.884981
| 0.80085
| 0.004596
| 0
| 0.655556
| 0
| 0
| 0.11287
| 0.006797
| 0
| 0
| 0
| 0
| 0.041111
| 1
| 0.051111
| false
| 0
| 0.013333
| 0
| 0.076667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
433884aa1bcd9d238d8e5400ea06c2a344277f46
| 50
|
py
|
Python
|
network/__init__.py
|
jyunlee/Pop-Out-Motion
|
29a477770f02a7a35c16d5f4a48d014cd357d94b
|
[
"MIT"
] | 63
|
2022-03-13T12:26:12.000Z
|
2022-03-31T14:07:12.000Z
|
network/__init__.py
|
jyunlee/Pop-Out-Motion
|
29a477770f02a7a35c16d5f4a48d014cd357d94b
|
[
"MIT"
] | null | null | null |
network/__init__.py
|
jyunlee/Pop-Out-Motion
|
29a477770f02a7a35c16d5f4a48d014cd357d94b
|
[
"MIT"
] | 1
|
2022-03-20T07:29:01.000Z
|
2022-03-20T07:29:01.000Z
|
from point_transformer import data, models, utils
| 25
| 49
| 0.84
| 7
| 50
| 5.857143
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12
| 50
| 1
| 50
| 50
| 0.931818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4a2aea804e16bf5ecfb96b640dfb2efc5c4b9df7
| 76
|
py
|
Python
|
microvk/__init__.py
|
Yazawasempai/IrCA-Duty
|
128fddea9824ea00cc0f61e79e9e77797fdce04f
|
[
"MIT"
] | null | null | null |
microvk/__init__.py
|
Yazawasempai/IrCA-Duty
|
128fddea9824ea00cc0f61e79e9e77797fdce04f
|
[
"MIT"
] | null | null | null |
microvk/__init__.py
|
Yazawasempai/IrCA-Duty
|
128fddea9824ea00cc0f61e79e9e77797fdce04f
|
[
"MIT"
] | 1
|
2020-10-12T18:14:49.000Z
|
2020-10-12T18:14:49.000Z
|
from .api import VkApi, VkApiResponseException
from .user_longpoll import LP
| 38
| 46
| 0.855263
| 10
| 76
| 6.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 76
| 2
| 47
| 38
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4a9fc733fdf4496e246602c01bae8ca4e6c6e4a4
| 38
|
py
|
Python
|
torchutils/losses/__init__.py
|
tchaye59/torchutils
|
ca7b01bf63b6c3adaa36a4a66dfd87e927ef2460
|
[
"MIT"
] | null | null | null |
torchutils/losses/__init__.py
|
tchaye59/torchutils
|
ca7b01bf63b6c3adaa36a4a66dfd87e927ef2460
|
[
"MIT"
] | null | null | null |
torchutils/losses/__init__.py
|
tchaye59/torchutils
|
ca7b01bf63b6c3adaa36a4a66dfd87e927ef2460
|
[
"MIT"
] | null | null | null |
from torchutils.losses.losses import *
| 38
| 38
| 0.842105
| 5
| 38
| 6.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 38
| 1
| 38
| 38
| 0.914286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4aa321ebb845a87ceab1cbb4986cc771cfa586ed
| 164
|
py
|
Python
|
hendrix/test/wsgi.py
|
anthonyalmarza/hendrix
|
eebd2a2183cc18ec2267d96a53a70d41b1630ce6
|
[
"MIT"
] | null | null | null |
hendrix/test/wsgi.py
|
anthonyalmarza/hendrix
|
eebd2a2183cc18ec2267d96a53a70d41b1630ce6
|
[
"MIT"
] | null | null | null |
hendrix/test/wsgi.py
|
anthonyalmarza/hendrix
|
eebd2a2183cc18ec2267d96a53a70d41b1630ce6
|
[
"MIT"
] | null | null | null |
def application(environ, start_response):
"""Basic WSGI Application"""
start_response('200 OK', [('Content-type','text/plain')])
return ['Hello World!']
| 41
| 61
| 0.670732
| 19
| 164
| 5.684211
| 0.842105
| 0.240741
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021277
| 0.140244
| 164
| 4
| 62
| 41
| 0.744681
| 0.134146
| 0
| 0
| 0
| 0
| 0.291971
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
4ab49297a840ae292347fda7244ab5aea46f695b
| 164
|
py
|
Python
|
install_script.py
|
carldcox/Pi-ansible-playbooks
|
49f8726bac97690fce4ff17f9175020aeac44791
|
[
"BSD-2-Clause"
] | null | null | null |
install_script.py
|
carldcox/Pi-ansible-playbooks
|
49f8726bac97690fce4ff17f9175020aeac44791
|
[
"BSD-2-Clause"
] | null | null | null |
install_script.py
|
carldcox/Pi-ansible-playbooks
|
49f8726bac97690fce4ff17f9175020aeac44791
|
[
"BSD-2-Clause"
] | null | null | null |
import bpy
bpy.ops.wm.addon_install(overwrite=True, filepath="/home/pi/cr_024_bl279.zip")
bpy.ops.wm.addon_enable(module='crowdrender')
bpy.ops.wm.save_userpref()
| 27.333333
| 78
| 0.792683
| 28
| 164
| 4.464286
| 0.714286
| 0.144
| 0.192
| 0.208
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038217
| 0.042683
| 164
| 5
| 79
| 32.8
| 0.757962
| 0
| 0
| 0
| 0
| 0
| 0.220859
| 0.153374
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
43499104c1234fc2b6f7b5f028dcb6305a1837ea
| 165
|
py
|
Python
|
goods/admin.py
|
iamforeverme/kshop
|
2ef4e06dfe1b22748d52342f1cf9646102999d20
|
[
"MIT"
] | null | null | null |
goods/admin.py
|
iamforeverme/kshop
|
2ef4e06dfe1b22748d52342f1cf9646102999d20
|
[
"MIT"
] | 1
|
2016-10-22T06:56:29.000Z
|
2016-10-23T13:49:40.000Z
|
goods/admin.py
|
iamforeverme/kshop
|
2ef4e06dfe1b22748d52342f1cf9646102999d20
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from models import Producer, Brand, Product
admin.site.register(Producer)
admin.site.register(Brand)
admin.site.register(Product)
| 20.625
| 43
| 0.818182
| 23
| 165
| 5.869565
| 0.478261
| 0.2
| 0.377778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 165
| 7
| 44
| 23.571429
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
435501c868761300ad95ea7a227083e62048f249
| 23
|
py
|
Python
|
GUI.py
|
Boro2001/dobblethon
|
71370489a8a6675bef4eb362736ba2ae18f353ba
|
[
"MIT"
] | null | null | null |
GUI.py
|
Boro2001/dobblethon
|
71370489a8a6675bef4eb362736ba2ae18f353ba
|
[
"MIT"
] | null | null | null |
GUI.py
|
Boro2001/dobblethon
|
71370489a8a6675bef4eb362736ba2ae18f353ba
|
[
"MIT"
] | null | null | null |
#Place for our GUI code
| 23
| 23
| 0.782609
| 5
| 23
| 3.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 23
| 1
| 23
| 23
| 0.947368
| 0.956522
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4356d4382ebacb151b5b6c197dbdfedf320ecf13
| 115
|
py
|
Python
|
authman/view/test.py
|
mshahmalaki/authman
|
2807cbf32f240e893e93ed076f10ee56666fed75
|
[
"MIT"
] | 6
|
2020-10-03T14:25:43.000Z
|
2022-03-06T08:05:58.000Z
|
authman/view/test.py
|
mshahmalaki/authman
|
2807cbf32f240e893e93ed076f10ee56666fed75
|
[
"MIT"
] | null | null | null |
authman/view/test.py
|
mshahmalaki/authman
|
2807cbf32f240e893e93ed076f10ee56666fed75
|
[
"MIT"
] | 6
|
2020-10-18T23:24:15.000Z
|
2021-02-22T08:01:33.000Z
|
from flask_restx import Resource
class TestResource(Resource):
def get(self):
return {"test": "ok"}
| 14.375
| 32
| 0.66087
| 14
| 115
| 5.357143
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.226087
| 115
| 7
| 33
| 16.428571
| 0.842697
| 0
| 0
| 0
| 0
| 0
| 0.052174
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
439c7b56404ad7d347d9e5173c55cf71f39911da
| 1,406
|
py
|
Python
|
taskie/api/users/serializers.py
|
Antoxerrr/taskie_server
|
9eb942096add76a218518f0f06457b4b459ba733
|
[
"MIT"
] | null | null | null |
taskie/api/users/serializers.py
|
Antoxerrr/taskie_server
|
9eb942096add76a218518f0f06457b4b459ba733
|
[
"MIT"
] | null | null | null |
taskie/api/users/serializers.py
|
Antoxerrr/taskie_server
|
9eb942096add76a218518f0f06457b4b459ba733
|
[
"MIT"
] | null | null | null |
from django.contrib.auth import get_user_model
from rest_framework import serializers
User = get_user_model()
class UserSerializer(serializers.Serializer):
"""Сериализатор пользователя."""
username = serializers.CharField(required=True)
email = serializers.EmailField(required=True)
first_name = serializers.CharField(
required=False, allow_blank=True, allow_null=True
)
last_name = serializers.CharField(
required=False, allow_blank=True, allow_null=True
)
class RegisterSerializer(serializers.Serializer):
"""Сериализатор регистрации пользователя."""
username = serializers.CharField(required=True)
email = serializers.EmailField(required=True)
password = serializers.CharField(required=True)
password_confirmation = serializers.CharField(required=True)
first_name = serializers.CharField(
required=False, allow_blank=True, allow_null=True
)
last_name = serializers.CharField(
required=False, allow_blank=True, allow_null=True
)
class LoginSerializer(serializers.Serializer):
"""Сериализатор авторизации пользователя."""
username = serializers.CharField(required=True)
password = serializers.CharField(required=True)
class LoginResponseSerializer(serializers.Serializer):
"""Сериализатор ответа на авторизацию."""
token = serializers.CharField()
user = UserSerializer()
| 29.291667
| 64
| 0.747511
| 139
| 1,406
| 7.431655
| 0.28777
| 0.212972
| 0.271055
| 0.185866
| 0.596321
| 0.588577
| 0.549855
| 0.460794
| 0.460794
| 0.460794
| 0
| 0
| 0.158606
| 1,406
| 47
| 65
| 29.914894
| 0.873204
| 0.099573
| 0
| 0.517241
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.103448
| 0.068966
| 0
| 0.689655
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
43ae0bfb8d7cb80316ea3a4862c1697ded97ee67
| 131
|
py
|
Python
|
deep_learn/dataset/text/__init__.py
|
ImbesatRizvi/Accio
|
b0ad2d245f4f7c42d85b722db9fad435c0d06a99
|
[
"Apache-2.0"
] | 2
|
2019-07-30T09:39:53.000Z
|
2019-07-30T09:40:06.000Z
|
deep_learn/dataset/text/__init__.py
|
ImbesatRizvi/Accio
|
b0ad2d245f4f7c42d85b722db9fad435c0d06a99
|
[
"Apache-2.0"
] | null | null | null |
deep_learn/dataset/text/__init__.py
|
ImbesatRizvi/Accio
|
b0ad2d245f4f7c42d85b722db9fad435c0d06a99
|
[
"Apache-2.0"
] | 2
|
2018-11-07T22:45:29.000Z
|
2019-10-24T09:53:41.000Z
|
from .PyMagnitudeField import PyMagnitudeField
from .RevField import RevField
from .utils import vocab_from_vectors, special_tokens
| 43.666667
| 53
| 0.877863
| 16
| 131
| 7
| 0.5625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091603
| 131
| 3
| 53
| 43.666667
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
78d886e5f06a4c4dc07dff2408746e3a19b4cb85
| 4,291
|
py
|
Python
|
tests/functional/test_leave.py
|
panchiwalashivani/python
|
eacafb9c597d04da3ac306809939045a17611d69
|
[
"MIT"
] | 1
|
2021-09-29T05:07:57.000Z
|
2021-09-29T05:07:57.000Z
|
tests/functional/test_leave.py
|
panchiwalashivani/python
|
eacafb9c597d04da3ac306809939045a17611d69
|
[
"MIT"
] | null | null | null |
tests/functional/test_leave.py
|
panchiwalashivani/python
|
eacafb9c597d04da3ac306809939045a17611d69
|
[
"MIT"
] | null | null | null |
import unittest
from pubnub.endpoints.presence.leave import Leave
from pubnub.managers import TelemetryManager
try:
from mock import MagicMock
except ImportError:
from unittest.mock import MagicMock
from pubnub.pubnub import PubNub
from tests.helper import pnconf, sdk_name
class TestLeave(unittest.TestCase):
def setUp(self):
self.pubnub = MagicMock(
spec=PubNub,
config=pnconf,
sdk_name=sdk_name,
uuid=None
)
self.pubnub.uuid = "UUID_SubscribeUnitTest"
self.pubnub._telemetry_manager = TelemetryManager()
self.leave = Leave(self.pubnub)
def test_leave_single_channel(self):
self.leave.channels('ch')
self.assertEqual(self.leave.build_path(), Leave.LEAVE_PATH % (pnconf.subscribe_key, "ch"))
self.assertEqual(self.leave.build_params_callback()({}), {
'pnsdk': sdk_name,
'uuid': self.pubnub.uuid
})
self.assertEqual(self.leave._channels, ['ch'])
def test_leave_multiple_channels(self):
self.leave.channels("ch1,ch2,ch3")
self.assertEqual(self.leave.build_path(), Leave.LEAVE_PATH % (pnconf.subscribe_key, "ch1,ch2,ch3"))
self.assertEqual(self.leave.build_params_callback()({}), {
'pnsdk': sdk_name,
'uuid': self.pubnub.uuid
})
self.assertEqual(self.leave._channels, ['ch1', 'ch2', 'ch3'])
def test_leave_multiple_channels_using_list(self):
self.leave.channels(['ch1', 'ch2', 'ch3'])
self.assertEqual(self.leave.build_path(), Leave.LEAVE_PATH % (pnconf.subscribe_key, "ch1,ch2,ch3"))
self.assertEqual(self.leave.build_params_callback()({}), {
'pnsdk': sdk_name,
'uuid': self.pubnub.uuid
})
self.assertEqual(self.leave._channels, ['ch1', 'ch2', 'ch3'])
def test_leave_multiple_channels_using_tuple(self):
self.leave.channels(('ch1', 'ch2', 'ch3'))
self.assertEqual(self.leave.build_path(), Leave.LEAVE_PATH % (pnconf.subscribe_key, "ch1,ch2,ch3"))
self.assertEqual(self.leave.build_params_callback()({}), {
'pnsdk': sdk_name,
'uuid': self.pubnub.uuid
})
self.assertEqual(self.leave._channels, ['ch1', 'ch2', 'ch3'])
def test_leave_single_group(self):
self.leave.channel_groups("gr")
self.assertEqual(self.leave.build_path(), Leave.LEAVE_PATH
% (pnconf.subscribe_key, ","))
self.assertEqual(self.leave.build_params_callback()({}), {
'channel-group': 'gr',
'pnsdk': sdk_name,
'uuid': self.pubnub.uuid
})
self.assertEqual(self.leave._groups, ['gr'])
def test_leave_multiple_groups_using_string(self):
self.leave.channel_groups("gr1,gr2,gr3")
self.assertEqual(self.leave.build_path(), Leave.LEAVE_PATH
% (pnconf.subscribe_key, ","))
self.assertEqual(self.leave.build_params_callback()({}), {
'channel-group': 'gr1,gr2,gr3',
'pnsdk': sdk_name,
'uuid': self.pubnub.uuid
})
self.assertEqual(self.leave._groups, ['gr1', 'gr2', 'gr3'])
def test_leave_multiple_groups_using_list(self):
self.leave.channel_groups(['gr1', 'gr2', 'gr3'])
self.assertEqual(self.leave.build_path(), Leave.LEAVE_PATH
% (pnconf.subscribe_key, ","))
self.assertEqual(self.leave.build_params_callback()({}), {
'channel-group': 'gr1,gr2,gr3',
'pnsdk': sdk_name,
'uuid': self.pubnub.uuid
})
self.assertEqual(self.leave._groups, ['gr1', 'gr2', 'gr3'])
def test_leave_channels_and_groups(self):
self.leave.channels('ch1,ch2').channel_groups(["gr1", "gr2"])
self.assertEqual(self.leave.build_path(), Leave.LEAVE_PATH
% (pnconf.subscribe_key, "ch1,ch2"))
self.assertEqual(self.leave.build_params_callback()({}), {
'pnsdk': sdk_name,
'uuid': self.pubnub.uuid,
'channel-group': 'gr1,gr2',
})
self.assertEqual(self.leave._groups, ['gr1', 'gr2'])
self.assertEqual(self.leave._channels, ['ch1', 'ch2'])
| 32.755725
| 107
| 0.606385
| 483
| 4,291
| 5.180124
| 0.124224
| 0.122302
| 0.189848
| 0.239808
| 0.779776
| 0.754996
| 0.720224
| 0.685851
| 0.685851
| 0.685851
| 0
| 0.017603
| 0.245397
| 4,291
| 130
| 108
| 33.007692
| 0.755096
| 0
| 0
| 0.516129
| 0
| 0
| 0.081333
| 0.005127
| 0
| 0
| 0
| 0
| 0.268817
| 1
| 0.096774
| false
| 0
| 0.086022
| 0
| 0.193548
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
60172bc8cbdf549aecec4948ad290b9df9c1ef84
| 67
|
py
|
Python
|
NsenBot.py
|
SlashNephy/NsenBot
|
8c308b0e63e3d8e71280bc82e6bd8a7debd6ee10
|
[
"MIT"
] | 1
|
2016-11-18T06:31:48.000Z
|
2016-11-18T06:31:48.000Z
|
NsenBot.py
|
SlashNephy/NsenBot
|
8c308b0e63e3d8e71280bc82e6bd8a7debd6ee10
|
[
"MIT"
] | null | null | null |
NsenBot.py
|
SlashNephy/NsenBot
|
8c308b0e63e3d8e71280bc82e6bd8a7debd6ee10
|
[
"MIT"
] | 2
|
2017-04-27T12:23:58.000Z
|
2022-02-23T01:27:10.000Z
|
# coding=utf-8
from NsenMusicBot import Bot
bot = Bot()
bot.run()
| 11.166667
| 28
| 0.701493
| 11
| 67
| 4.272727
| 0.727273
| 0.382979
| 0.382979
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017857
| 0.164179
| 67
| 5
| 29
| 13.4
| 0.821429
| 0.179104
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
60181b4ef33adc2c5df23d959a1470bb09f8423e
| 140
|
py
|
Python
|
fypress/models/__init__.py
|
Fy-/FyPress
|
be357134969797f92b94db77c9e3aa863c0094fe
|
[
"MIT"
] | 70
|
2016-06-07T10:17:02.000Z
|
2021-06-23T05:36:03.000Z
|
fypress/models/__init__.py
|
Fy-/FyPress
|
be357134969797f92b94db77c9e3aa863c0094fe
|
[
"MIT"
] | 10
|
2016-06-24T08:17:54.000Z
|
2020-07-24T07:34:39.000Z
|
fypress/models/__init__.py
|
Fy-/FyPress
|
be357134969797f92b94db77c9e3aa863c0094fe
|
[
"MIT"
] | 15
|
2016-06-16T20:40:55.000Z
|
2019-08-06T02:45:52.000Z
|
from fysql import Table
from fypress import FyPress
fypress = FyPress()
class FyPressTables(Table):
db = fypress.database.db
| 15.555556
| 29
| 0.714286
| 17
| 140
| 5.882353
| 0.529412
| 0.28
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.221429
| 140
| 8
| 30
| 17.5
| 0.917431
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
602669ad8e59f740204e0cd3d551625b2c66a892
| 99
|
py
|
Python
|
ilpexp/problem/hexose/__init__.py
|
logic-and-learning-lab/Popper-experiments
|
94d7499e32c3c9b01da5fd53cddef8a8afa8d509
|
[
"MIT"
] | 3
|
2022-01-30T09:51:17.000Z
|
2022-03-13T20:04:09.000Z
|
ilpexp/problem/hexose/__init__.py
|
logic-and-learning-lab/Popper-experiments
|
94d7499e32c3c9b01da5fd53cddef8a8afa8d509
|
[
"MIT"
] | 5
|
2022-01-30T09:38:12.000Z
|
2022-01-31T08:34:49.000Z
|
ilpexp/problem/hexose/__init__.py
|
logic-and-learning-lab/Popper-experiments
|
94d7499e32c3c9b01da5fd53cddef8a8afa8d509
|
[
"MIT"
] | null | null | null |
from .hexose import Hexose
# HEXOSE_PROBLEM = Hexose('atoms')
HEXOSE_PROBLEM = Hexose('aminoacid')
| 24.75
| 36
| 0.767677
| 12
| 99
| 6.166667
| 0.5
| 0.351351
| 0.513514
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 99
| 4
| 36
| 24.75
| 0.840909
| 0.323232
| 0
| 0
| 0
| 0
| 0.136364
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
604014757967934fcaad91dd1e128d63a324ca44
| 124
|
py
|
Python
|
python/dgl/distgnn/__init__.py
|
ketyi/dgl
|
a1b859c29b63a673c148d13231a49504740e0e01
|
[
"Apache-2.0"
] | 9,516
|
2018-12-08T22:11:31.000Z
|
2022-03-31T13:04:33.000Z
|
python/dgl/distgnn/__init__.py
|
ketyi/dgl
|
a1b859c29b63a673c148d13231a49504740e0e01
|
[
"Apache-2.0"
] | 2,494
|
2018-12-08T22:43:00.000Z
|
2022-03-31T21:16:27.000Z
|
python/dgl/distgnn/__init__.py
|
ketyi/dgl
|
a1b859c29b63a673c148d13231a49504740e0e01
|
[
"Apache-2.0"
] | 2,529
|
2018-12-08T22:56:14.000Z
|
2022-03-31T13:07:41.000Z
|
"""
This package contains DistGNN and Libra based graph partitioning tools.
"""
from . import partition
from . import tools
| 20.666667
| 71
| 0.766129
| 16
| 124
| 5.9375
| 0.8125
| 0.210526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 124
| 5
| 72
| 24.8
| 0.913462
| 0.572581
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
604b7850cee1163bfe5fa3b0457d4fd713056417
| 106,602
|
py
|
Python
|
maddress/domain_dict.py
|
scottdraper8/maddress
|
80d95dcc40e615fb0ed247fb2b5196bf50221726
|
[
"MIT"
] | 2
|
2021-07-20T22:54:40.000Z
|
2022-02-23T09:13:58.000Z
|
maddress/domain_dict.py
|
scottdraper8/maddress
|
80d95dcc40e615fb0ed247fb2b5196bf50221726
|
[
"MIT"
] | null | null | null |
maddress/domain_dict.py
|
scottdraper8/maddress
|
80d95dcc40e615fb0ed247fb2b5196bf50221726
|
[
"MIT"
] | 1
|
2021-08-24T08:48:01.000Z
|
2021-08-24T08:48:01.000Z
|
domains = {
'generic': ['ak.gov', 'ak.us', 'al.gov', 'al.us', 'ar.gov', 'ar.us', 'az.gov', 'az.us', 'ca.gov', 'ca.us', 'co.gov', 'co.us', 'ct.gov', 'ct.us', 'dc.gov', 'dc.us', 'de.gov', 'de.us', 'fl.gov', 'fl.us', 'ga.gov', 'ga.us', 'hi.gov', 'hi.us', 'ia.gov', 'ia.us', 'id.gov', 'id.us', 'il.gov', 'il.us', 'in.gov', 'in.us', 'k12.ak.us', 'k12.al.us', 'k12.ar.us', 'k12.az.us', 'k12.ca.us', 'k12.co.us', 'k12.ct.us', 'k12.dc.us', 'k12.de.us', 'k12.fl.us', 'k12.ga.us', 'k12.hi.us', 'k12.ia.us', 'k12.id.us', 'k12.il.us', 'k12.in.us', 'k12.ks.us', 'k12.ky.us', 'k12.la.us', 'k12.ma.us', 'k12.md.us', 'k12.me.us', 'k12.mi.us', 'k12.mn.us', 'k12.mo.us', 'k12.ms.us', 'k12.mt.us', 'k12.nc.us', 'k12.nd.us', 'k12.ne.us', 'k12.nh.us', 'k12.nj.us', 'k12.nm.us', 'k12.nv.us', 'k12.ny.us', 'k12.oh.us', 'k12.ok.us', 'k12.or.us', 'k12.pa.us', 'k12.ri.us', 'k12.sc.us', 'k12.sd.us', 'k12.tn.us', 'k12.tx.us', 'k12.ut.us', 'k12.va.us', 'k12.vt.us', 'k12.wa.us', 'k12.wi.us', 'k12.wv.us', 'k12.wy.us', 'ks.gov', 'ks.us', 'ky.gov', 'ky.us', 'kyschools.us', 'la.gov', 'la.us', 'ma.gov', 'ma.us', 'md.gov', 'md.us', 'me.gov', 'me.us', 'mi.gov', 'mi.us', 'mn.gov', 'mn.us', 'mo.gov', 'mo.us', 'ms.gov', 'ms.us', 'mt.gov', 'mt.us', 'nc.gov', 'nc.us', 'nd.gov', 'nd.us', 'ne.gov', 'ne.us', 'nh.gov', 'nh.us', 'nj.gov', 'nj.us', 'nm.gov', 'nm.us', 'nv.gov', 'nv.us', 'ny.gov', 'ny.us', 'oh.gov', 'oh.us', 'ok.gov', 'ok.us', 'or.gov', 'or.us', 'pa.gov', 'pa.us', 'ri.gov', 'ri.us', 'sc.gov', 'sc.us', 'schools.nyc.gov', 'sd.gov', 'sd.us', 'tn.gov', 'tn.us', 'tx.gov', 'tx.us', 'uscourts.gov', 'ut.gov', 'ut.us', 'va.gov', 'va.us', 'vt.gov', 'vt.us', 'wa.gov', 'wa.us', 'wi.gov', 'wi.us', 'wv.gov', 'wv.us', 'wy.gov', 'wy.us'],
'personal': ['0-mail.com', '007addict.com', '020.co.uk', '027168.com', '0815.ru', '0815.su', '0clickemail.com', '0sg.net', '0wnd.net', '0wnd.org', '1033edge.com', '10mail.org', '10minutemail.co.za', '10minutemail.com', '11mail.com', '123-m.com', '123.com', '123box.net', '123india.com', '123mail.cl', '123mail.org', '123qwe.co.uk', '126.com', '126.net', '138mail.com', '139.com', '150mail.com', '150ml.com', '15meg4free.com', '163.com', '16mail.com', '188.com', '189.cn', '1auto.com', '1ce.us', '1chuan.com', '1colony.com', '1coolplace.com', '1email.eu', '1freeemail.com', '1fsdfdsfsdf.tk', '1funplace.com', '1internetdrive.com', '1mail.ml', '1mail.net', '1me.net', '1mum.com', '1musicrow.com', '1netdrive.com', '1nsyncfan.com', '1pad.de', '1under.com', '1webave.com', '1webhighway.com', '1zhuan.com', '2-mail.com', '20email.eu', '20mail.in', '20mail.it', '20minutemail.com', '212.com', '21cn.com', '247emails.com', '24horas.com', '2911.net', '2980.com', '2bmail.co.uk', '2coolforyou.net', '2d2i.com', '2die4.com', '2fdgdfgdfgdf.tk', '2hotforyou.net', '2mydns.com', '2net.us', '2prong.com', '2trom.com', '3000.it', '30minutemail.com', '30minutesmail.com', '3126.com', '321media.com', '33mail.com', '360.ru', '37.com', '3ammagazine.com', '3dmail.com', '3email.com', '3g.ua', '3mail.ga', '3trtretgfrfe.tk', '3xl.net', '444.net', '4email.com', '4email.net', '4gfdsgfdgfd.tk', '4mg.com', '4newyork.com', '4warding.com', '4warding.net', '4warding.org', '4x4fan.com', '4x4man.com', '50mail.com', '5fm.za.com', '5ghgfhfghfgh.tk', '5iron.com', '5star.com', '60minutemail.com', '6hjgjhgkilkj.tk', '6ip.us', '6mail.cf', '6paq.com', '702mail.co.za', '74.ru', '7mail.ga', '7mail.ml', '7tags.com', '88.am', '8848.net', '888.nu', '8mail.ga', '8mail.ml', '97rock.com', '99experts.com', '9ox.net', 'a-bc.net', 'a-player.org', 'a2z4u.net', 'a45.in', 'aaamail.zzn.com', 'aahlife.com', 'aamail.net', 'aapt.net.au', 'aaronkwok.net', 'abbeyroadlondon.co.uk', 
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.com', 'abcflash.net', 'abdulnour.com', 'aberystwyth.com', 'abolition-now.com', 'about.com', 'absolutevitality.com', 'abusemail.de', 'abv.bg', 'abwesend.de', 'abyssmail.com', 'ac20mail.in', 'academycougars.com', 'acceso.or.cr', 'access4less.net', 'accessgcc.com', 'accountant.com', 'acdcfan.com', 'acdczone.com', 'ace-of-base.com', 'acmecity.com', 'acmemail.net', 'acninc.net', 'acrobatmail.com', 'activatormail.com', 'activist.com', 'adam.com.au', 'add3000.pp.ua', 'addcom.de', 'address.com', 'adelphia.net', 'adexec.com', 'adfarrow.com', 'adinet.com.uy', 'adios.net', 'admin.in.th', 'administrativos.com', 'adoption.com', 'ados.fr', 'adrenalinefreak.com', 'adres.nl', 'advalvas.be', 'advantimo.com', 'aeiou.pt', 'aemail4u.com', 'aeneasmail.com', 'afreeinternet.com', 'africa-11.com', 'africamail.com', 'africamel.net', 'africanpartnersonline.com', 'afrobacon.com', 'ag.us.to', 'agedmail.com', 'agelessemail.com', 'agoodmail.com', 'ahaa.dk', 'ahk.jp', 'aichi.com', 'aim.com', 'aircraftmail.com', 'airforce.net', 'airforceemail.com', 'airmail.net', 'airpost.net', 'aiutamici.com', 'ajacied.com', 'ajaxapp.net', 'ak47.hu', 'aknet.kg', 'akphantom.com', 'albawaba.com', 'alecsmail.com', 'alex4all.com', 'alexandria.cc', 'algeria.com', 'algeriamail.com', 'alhilal.net', 'alibaba.com', 'alice.it', 'aliceadsl.fr', 'aliceinchainsmail.com', 'alivance.com', 'alive.cz', 'aliyun.com', 'allergist.com', 'allmail.net', 'alloymail.com', 'allracing.com', 'allsaintsfan.com', 'alltel.net', 'alpenjodel.de', 'alphafrau.de', 'alskens.dk', 'altavista.com', 'altavista.net', 'altavista.se', 'alternativagratis.com', 'alumni.com', 'alumnidirector.com', 'alvilag.hu', 'ama-trade.de', 'amail.com', 'amazonses.com', 'amele.com', 'america.hm', 'ameritech.net', 'amilegit.com', 'amiri.net', 'amiriindustries.com', 'amnetsal.com', 'amorki.pl', 'amrer.net', 'amuro.net', 'amuromail.com', 'ananzi.co.za', 'ancestry.com', 'andreabocellimail.com', 'andylau.net', 
'anfmail.com', 'angelfan.com', 'angelfire.com', 'angelic.com', 'animail.net', 'animal.net', 'animalhouse.com', 'animalwoman.net', 'anjungcafe.com', 'anniefans.com', 'annsmail.com', 'ano-mail.net', 'anonmails.de', 'anonymbox.com', 'anonymous.to', 'anote.com', 'another.com', 'anotherdomaincyka.tk', 'anotherwin95.com', 'anti-ignorance.net', 'anti-social.com', 'antichef.com', 'antichef.net', 'antiqueemail.com', 'antireg.ru', 'antisocial.com', 'antispam.de', 'antispam24.de', 'antispammail.de', 'antongijsen.com', 'antwerpen.com', 'anymoment.com', 'anytimenow.com', 'aol.co.uk', 'aol.com', 'aol.de', 'aol.fr', 'aol.it', 'aol.jp', 'aon.at', 'apexmail.com', 'apmail.com', 'apollo.lv', 'aport.ru', 'aport2000.ru', 'apple.sib.ru', 'appraiser.net', 'approvers.net', 'aquaticmail.net', 'arabia.com', 'arabtop.net', 'arcademaster.com', 'archaeologist.com', 'archerymail.com', 'arcor.de', 'arcotronics.bg', 'arcticmail.com', 'argentina.com', 'arhaelogist.com', 'aristotle.org', 'army.net', 'armyspy.com', 'arnet.com.ar', 'art-en-ligne.pro', 'artistemail.com', 'artlover.com', 'artlover.com.au', 'artman-conception.com', 'as-if.com', 'asdasd.nl', 'asean-mail', 'asean-mail.com', 'asheville.com', 'asia-links.com', 'asia-mail.com', 'asia.com', 'asiafind.com', 'asianavenue.com', 'asiancityweb.com', 'asiansonly.net', 'asianwired.net', 'asiapoint.net', 'askaclub.ru', 'ass.pp.ua', 'assala.com', 'assamesemail.com', 'astroboymail.com', 'astrolover.com', 'astrosfan.com', 'astrosfan.net', 'asurfer.com', 'atheist.com', 'athenachu.net', 'atina.cl', 'atl.lv', 'atlanticbb.net ', 'atlas.cz', 'atlaswebmail.com', 'atlink.com', 'atmc.net', 'ato.check.com', 'atozasia.com', 'atrus.ru', 'att.net', 'attbi.com', 'attglobal.net', 'attymail.com', 'au.ru', 'auctioneer.net', 'aufeminin.com', 'aus-city.com', 'ausi.com', 'aussiemail.com.au', 'austin.rr.com', 'australia.edu', 'australiamail.com', 'austrosearch.net', 'autoescuelanerja.com', 'autograf.pl', 'automail.ru', 'automotiveauthority.com', 'autorambler.ru', 
'aver.com', 'avh.hu', 'avia-tonic.fr', 'avtoritet.ru', 'awayonvacation.com', 'awholelotofamechi.com', 'awsom.net', 'axoskate.com', 'ayna.com', 'azazazatashkent.tk', 'azimiweb.com', 'azmeil.tk', 'bachelorboy.com', 'bachelorgal.com', 'backfliper.com', 'backpackers.com', 'backstreet-boys.com', 'backstreetboysclub.com', 'backtothefuturefans.com', 'backwards.com', 'badtzmail.com', 'bagherpour.com', 'bahrainmail.com', 'bakpaka.com', 'bakpaka.net', 'baldmama.de', 'baldpapa.de', 'ballerstatus.net', 'ballyfinance.com', 'balochistan.org', 'baluch.com', 'bangkok.com', 'bangkok2000.com', 'bannertown.net', 'baptistmail.com', 'baptized.com', 'barcelona.com', 'bareed.ws', 'barid.com', 'barlick.net', 'bartender.net', 'baseball-email.com', 'baseballmail.com', 'basketballmail.com', 'batuta.net', 'baudoinconsulting.com', 'baxomale.ht.cx', 'bboy.com', 'bboy.zzn.com', 'bcpl.net', 'bcvibes.com', 'beddly.com', 'beeebank.com', 'beefmilk.com', 'beenhad.com', 'beep.ru', 'beer.com', 'beerandremotes.com', 'beethoven.com', 'beirut.com', 'belice.com', 'belizehome.com', 'belizemail.net', 'belizeweb.com', 'bell.net', 'bellair.net', 'bellatlantic.net', 'bellnet.ca', 'bellsouth.net', 'berkscounty.com', 'berlin.com', 'berlin.de', 'berlinexpo.de', 'bestmail.us', 'bestweb.net', 'betriebsdirektor.de', 'bettergolf.net', 'bev.net', 'bharatmail.com', 'big1.us', 'big5mail.com', 'bigassweb.com', 'bigblue.net.au', 'bigboab.com', 'bigfoot.com', 'bigfoot.de', 'bigger.com', 'biggerbadder.com', 'bigmailbox.com', 'bigmir.net', 'bigpond.au', 'bigpond.com', 'bigpond.com.au', 'bigpond.net', 'bigpond.net.au', 'bigramp.com', 'bigstring.com', 'bikemechanics.com', 'bikeracer.com', 'bikeracers.net', 'bikerider.com', 'billsfan.com', 'billsfan.net', 'bimamail.com', 'bimla.net', 'bin-wieder-da.de', 'binkmail.com', 'bio-muesli.info', 'bio-muesli.net', 'biologyfan.com', 'birdfanatic.com', 'birdlover.com', 'birdowner.net', 'bisons.com', 'bitmail.com', 'bitpage.net', 'bizhosting.com', 'bk.ru', 'bkkmail.com', 'bla-bla.com', 
'blackburnfans.com', 'blackburnmail.com', 'blackplanet.com', 'blacksburg.net', 'blader.com', 'bladesmail.net', 'blazemail.com', 'blazenet.net', 'bleib-bei-mir.de', 'blink182.net', 'blockfilter.com', 'blogmyway.org', 'blondandeasy.com', 'bluebottle.com', 'bluehyppo.com', 'bluemail.ch', 'bluemail.dk', 'bluesfan.com', 'bluewin.ch', 'blueyonder.co.uk', 'blumail.org', 'blushmail.com', 'blutig.me', 'bmlsports.net', 'boardermail.com', 'boarderzone.com', 'boatracers.com', 'bobmail.info', 'bodhi.lawlita.com', 'bofthew.com', 'bol.com.br', 'bolando.com', 'bollywoodz.com', 'bolt.com', 'boltonfans.com', 'bombdiggity.com', 'bonbon.net', 'boom.com', 'bootmail.com', 'bootybay.de', 'bornagain.com', 'bornnaked.com', 'bossofthemoss.com', 'bostonoffice.com', 'boun.cr', 'bounce.net', 'bounces.amazon.com', 'bouncr.com', 'box.az', 'box.ua', 'boxbg.com', 'boxemail.com', 'boxformail.in', 'boxfrog.com', 'boximail.com', 'boyzoneclub.com', 'bradfordfans.com', 'brasilia.net', 'bratan.ru', 'brazilmail.com', 'brazilmail.com.br', 'breadtimes.press', 'breakthru.com', 'breathe.com', 'brefmail.com', 'brennendesreich.de', 'bresnan.net', 'brestonline.com', 'brew-master.com', 'brew-meister.com', 'brfree.com.br', 'briefemail.com', 'bright.net', 'britneyclub.com', 'brittonsign.com', 'broadcast.net', 'broadwaybuff.com', 'broadwaylove.com', 'brokeandhappy.com', 'brokenvalve.com', 'brujula.net', 'brunetka.ru', 'brusseler.com', 'bsdmail.com', 'bsnow.net', 'bspamfree.org', 'bt.com', 'btcc.org', 'btcmail.pw', 'btconnect.co.uk', 'btconnect.com', 'btinternet.com', 'btopenworld.co.uk', 'buerotiger.de', 'buffymail.com', 'bugmenot.com', 'bulgaria.com', 'bullsfan.com', 'bullsgame.com', 'bumerang.ro', 'bumpymail.com', 'bumrap.com', 'bund.us', 'bunita.net', 'bunko.com', 'burnthespam.info', 'burntmail.com', 'burstmail.info', 'buryfans.com', 'bushemail.com', 'business-man.com', 'businessman.net', 'businessweekmail.com', 'bust.com', 'busta-rhymes.com', 'busymail.com', 'busymail.com.com', 'busymail.comhomeart.com', 
'butch-femme.net', 'butovo.net', 'buyersusa.com', 'buymoreplays.com', 'buzy.com', 'bvimailbox.com', 'byke.com', 'byom.de', 'byteme.com', 'c2.hu', 'c2i.net', 'c3.hu', 'c4.com', 'c51vsgq.com', 'cabacabana.com', 'cable.comcast.com', 'cableone.net', 'caere.it', 'cairomail.com', 'cais.net', 'calcuttaads.com', 'calendar-server.bounces.google.com', 'calidifontain.be', 'californiamail.com', 'callnetuk.com', 'callsign.net', 'caltanet.it', 'camidge.com', 'canada-11.com', 'canada.com', 'canadianmail.com', 'canoemail.com', 'cantv.net', 'canwetalk.com', 'capu.net', 'caramail.com', 'card.zp.ua', 'care2.com', 'careceo.com', 'careerbuildermail.com', 'carioca.net', 'cartelera.org', 'cartestraina.ro', 'casablancaresort.com', 'casema.nl', 'cash4u.com', 'cashette.com', 'casino.com', 'casualdx.com', 'cataloniamail.com', 'cataz.com', 'catcha.com', 'catchamail.com', 'catemail.com', 'catholic.org', 'catlover.com', 'catsrule.garfield.com', 'ccnmail.com', 'cd2.com', 'cek.pm', 'celineclub.com', 'celtic.com', 'center-mail.de', 'centermail.at', 'centermail.com', 'centermail.de', 'centermail.info', 'centermail.net', 'centoper.it', 'centralpets.com', 'centrum.cz', 'centrum.sk', 'centurylink.net', 'centurytel.net', 'certifiedmail.com', 'cfl.rr.com', 'cgac.es', 'cghost.s-a-d.de', 'chacuo.net', 'chaiyo.com', 'chaiyomail.com', 'chalkmail.net', 'chammy.info', 'chance2mail.com', 'chandrasekar.net', 'channelonetv.com', 'charityemail.com', 'charm.net', 'charmedmail.com', 'charter.com', 'charter.net', 'chat.ru', 'chatlane.ru', 'chattown.com', 'chauhanweb.com', 'cheatmail.de', 'chechnya.conf.work', 'check.com', 'check.com12', 'check1check.com', 'cheeb.com', 'cheerful.com', 'chef.net', 'chefmail.com', 'chek.com', 'chello.nl', 'chemist.com', 'chequemail.com', 'cheshiremail.com', 'cheyenneweb.com', 'chez.com', 'chickmail.com', 'chil-e.com', 'childrens.md', 'childsavetrust.org', 'china.com', 'china.net.vg', 'chinalook.com', 'chinamail.com', 'chinesecool.com', 'chirk.com', 'chocaholic.com.au', 'chocofan.com', 
'chogmail.com', 'choicemail1.com', 'chong-mail.com', 'chong-mail.net', 'christianmail.net', 'chronicspender.com', 'churchusa.com', 'cia-agent.com', 'cia.hu', 'ciaoweb.it', 'cicciociccio.com', 'cinci.rr.com', 'cincinow.net', 'cirquefans.com', 'citeweb.net', 'citiz.net', 'citlink.net', 'citromail.hu', 'city-of-bath.org', 'city-of-birmingham.com', 'city-of-brighton.org', 'city-of-cambridge.com', 'city-of-coventry.com', 'city-of-edinburgh.com', 'city-of-lichfield.com', 'city-of-lincoln.com', 'city-of-liverpool.com', 'city-of-manchester.com', 'city-of-nottingham.com', 'city-of-oxford.com', 'city-of-swansea.com', 'city-of-westminster.com', 'city-of-westminster.net', 'city-of-york.net', 'city2city.com', 'citynetusa.com', 'cityofcardiff.net', 'cityoflondon.org', 'ciudad.com.ar', 'ckaazaza.tk', 'claramail.com', 'classicalfan.com', 'classicmail.co.za', 'clear.net.nz', 'clearwire.net', 'clerk.com', 'clickforadate.com', 'cliffhanger.com', 'clixser.com', 'close2you.ne', 'close2you.net', 'clrmail.com', 'club-internet.fr', 'club4x4.net', 'clubalfa.com', 'clubbers.net', 'clubducati.com', 'clubhonda.net', 'clubmember.org', 'clubnetnoir.com', 'clubvdo.net', 'cluemail.com', 'cmail.net', 'cmail.org', 'cmail.ru', 'cmpmail.com', 'cmpnetmail.com', 'cnegal.com', 'cnnsimail.com', 'cntv.cn', 'codec.ro', 'codec.ro.ro', 'codec.roemail.ro', 'coder.hu', 'coid.biz', 'coldemail.info', 'coldmail.com', 'collectiblesuperstore.com', 'collector.org', 'collegebeat.com', 'collegeclub.com', 'collegemail.com', 'colleges.com', 'columbus.rr.com', 'columbusrr.com', 'columnist.com', 'comast.com', 'comast.net', 'comcast.com', 'comcast.net', 'comic.com', 'communityconnect.com', 'complxmind.com', 'comporium.net', 'comprendemail.com', 'compuserve.com', 'computer-expert.net', 'computer-freak.com', 'computer4u.com', 'computerconfused.com', 'computermail.net', 'computernaked.com', 'concentric.net', 'conexcol.com', 'cong.ru', 'conk.com', 'connect4free.net', 'connectbox.com', 'conok.com', 'consultant.com', 
'consumerriot.com', 'contractor.net', 'contrasto.cu.cc', 'cookiemonster.com', 'cool.br', 'cool.fr.nf', 'coole-files.de', 'coolgoose.ca', 'coolgoose.com', 'coolkiwi.com', 'coollist.com', 'coolmail.com', 'coolmail.net', 'coolrio.com', 'coolsend.com', 'coolsite.net', 'cooooool.com', 'cooperation.net', 'cooperationtogo.net', 'copacabana.com', 'copper.net', 'copticmail.com', 'cornells.com', 'cornerpub.com', 'corporatedirtbag.com', 'correo.terra.com.gt', 'corrsfan.com', 'cortinet.com', 'cosmo.com', 'cotas.net', 'counsellor.com', 'countrylover.com', 'courriel.fr.nf', 'courrieltemporaire.com', 'covad.net', 'cox.com', 'cox.net', 'coxinet.net', 'coxmail.com', 'cpaonline.net', 'cracker.hu', 'craftemail.com', 'crapmail.org', 'crazedanddazed.com', 'crazy.ru', 'crazymailing.com', 'crazysexycool.com', 'crewstart.com', 'cristianemail.com', 'critterpost.com', 'croeso.com', 'crosshairs.com', 'crosslink.net', 'crosswinds.net', 'crunkmail.com', 'crwmail.com', 'cry4helponline.com', 'cryingmail.com', 'cs.com', 'csi.com', 'csinibaba.hu', 'cubiclink.com', 'cuemail.com', 'cumbriamail.com', 'curio-city.com', 'curryworld.de', 'curtsmail.com', 'cust.in', 'cute-girl.com', 'cuteandcuddly.com', 'cutekittens.com', 'cutey.com', 'cuvox.de', 'cww.de', 'cyber-africa.net', 'cyber-innovation.club', 'cyber-matrix.com', 'cyber-phone.eu', 'cyber-wizard.com', 'cyber4all.com', 'cyberbabies.com', 'cybercafemaui.com', 'cybercity-online.net', 'cyberdude.com', 'cyberforeplay.net', 'cybergal.com', 'cybergrrl.com', 'cyberinbox.com', 'cyberleports.com', 'cybermail.net', 'cybernet.it', 'cyberservices.com', 'cyberspace-asia.com', 'cybertrains.org', 'cyclefanz.com', 'cymail.net', 'cynetcity.com', 'd3p.dk', 'dabsol.net', 'dacoolest.com', 'dadacasa.com', 'daha.com', 'dailypioneer.com', 'dallas.theboys.com', 'dallasmail.com', 'dandikmail.com', 'dangerous-minds.com', 'dansegulvet.com', 'dasdasdascyka.tk', 'data54.com', 'date.by', 'daum.net', 'davegracey.com', 'dawnsonmail.com', 'dawsonmail.com', 'dayrep.com', 
'dazedandconfused.com', 'dbzmail.com', 'dca.net', 'dcemail.com', 'dcsi.net', 'ddns.org', 'deadaddress.com', 'deadlymob.org', 'deadspam.com', 'deafemail.net', 'deagot.com', 'deal-maker.com', 'dearriba.com', 'death-star.com', 'deepseafisherman.net', 'deforestationsucks.com', 'degoo.com', 'dejanews.com', 'delikkt.de', 'deliveryman.com', 'deltanet.com', 'deneg.net', 'depechemode.com', 'deseretmail.com', 'desertmail.com', 'desertonline.com', 'desertsaintsmail.com', 'desilota.com', 'deskmail.com', 'deskpilot.com', 'despam.it', 'despammed.com', 'destin.com', 'detik.com', 'deutschland-net.com', 'devnullmail.com', 'devotedcouples.com', 'dezigner.ru', 'dfgh.net', 'dfwatson.com', 'dglnet.com.br', 'dgoh.org', 'di-ve.com', 'diamondemail.com', 'didamail.com', 'die-besten-bilder.de', 'die-genossen.de', 'die-optimisten.de', 'die-optimisten.net', 'die.life', 'diehardmail.com', 'diemailbox.de', 'digibel.be', 'digital-filestore.de', 'digitalforeplay.net', 'digitalsanctuary.com', 'digosnet.com', 'dingbone.com', 'diplomats.com', 'directbox.com', 'director-general.com', 'direcway.com', 'diri.com', 'dirtracer.com', 'dirtracers.com', 'discard.email', 'discard.ga', 'discard.gq', 'discardmail.com', 'discardmail.de', 'disciples.com', 'discofan.com', 'discovery.com', 'discoverymail.com', 'discoverymail.net', 'disign-concept.eu', 'disign-revelation.com', 'disinfo.net', 'dispomail.eu', 'disposable.com', 'disposableaddress.com', 'disposableemailaddresses.com', 'disposableinbox.com', 'dispose.it', 'dispostable.com', 'divismail.ru', 'divorcedandhappy.com', 'dm.w3internet.co.uk', 'dmailman.com', 'dmitrovka.net', 'dmitry.ru', 'dmv.com', 'dnainternet.net', 'dnsmadeeasy.com', 'doar.net', 'doclist.bounces.google.com', 'docmail.cz', 'docs.google.com', 'doctor.com', 'dodgeit.com', 'dodgit.com', 'dodgit.org', 'dodo.com.au', 'dodsi.com', 'dog.com', 'dogit.com', 'doglover.com', 'dogmail.co.uk', 'dogsnob.net', 'doityourself.com', 'domforfb1.tk', 'domforfb2.tk', 'domforfb3.tk', 'domforfb4.tk', 'domforfb5.tk', 
'domforfb6.tk', 'domforfb7.tk', 'domforfb8.tk', 'domozmail.com', 'doneasy.com', 'donegal.net', 'donemail.ru', 'donjuan.com', 'dontgotmail.com', 'dontmesswithtexas.com', 'dontreg.com', 'dontsendmespam.de', 'doramail.com', 'dostmail.com', 'dotcom.fr', 'dotmsg.com', 'dotnow.com', 'dott.it', 'download-privat.de', 'dplanet.ch', 'dr.com', 'dragoncon.net', 'dragracer.com', 'drdrb.net', 'drivehq.com', 'dropmail.me', 'dropzone.com', 'drotposta.hu', 'dslextreme.com', 'dubaimail.com', 'dublin.com', 'dublin.ie', 'dump-email.info', 'dumpandjunk.com', 'dumpmail.com', 'dumpmail.de', 'dumpyemail.com', 'dunlopdriver.com', 'dunloprider.com', 'duno.com', 'duskmail.com', 'dustdevil.com', 'dutchmail.com', 'dvd-fan.net', 'dwp.net', 'dygo.com', 'dynamitemail.com', 'dyndns.org', 'e-apollo.lv', 'e-hkma.com', 'e-mail.com', 'e-mail.com.tr', 'e-mail.dk', 'e-mail.org', 'e-mail.ru', 'e-mail.ua', 'e-mailanywhere.com', 'e-mails.ru', 'e-tapaal.com', 'e-webtec.com', 'e4ward.com', 'earthalliance.com', 'earthcam.net', 'earthdome.com', 'earthling.net', 'earthlink.net', 'earthonline.net', 'eastcoast.co.za', 'eastlink.ca', 'eastmail.com', 'eastrolog.com', 'easy.com', 'easy.to', 'easypeasy.com', 'easypost.com', 'easytrashmail.com', 'eatmydirt.com', 'ebprofits.net', 'ec.rr.com', 'ecardmail.com', 'ecbsolutions.net', 'echina.com', 'ecolo-online.fr', 'ecompare.com', 'edge.net', 'edmail.com', 'ednatx.com', 'edtnmail.com', 'educacao.te.pt', 'educastmail.com', 'eelmail.com', 'ehmail.com', 'einmalmail.de', 'einrot.com', 'einrot.de', 'eintagsmail.de', 'eircom.net', 'ekidz.com.au', 'elisanet.fi', 'elitemail.org', 'elsitio.com', 'eltimon.com', 'elvis.com', 'elvisfan.com', 'email-fake.gq', 'email-london.co.uk', 'email-value.com', 'email.biz', 'email.cbes.net', 'email.com', 'email.cz', 'email.ee', 'email.it', 'email.nu', 'email.org', 'email.ro', 'email.ru', 'email.si', 'email.su', 'email.ua', 'email.women.com', 'email2me.com', 'email2me.net', 'email4u.info', 'email60.com', 'emailacc.com', 'emailaccount.com', 
'emailaddresses.com', 'emailage.ga', 'emailage.gq', 'emailasso.net', 'emailchoice.com', 'emailcorner.net', 'emailem.com', 'emailengine.net', 'emailengine.org', 'emailer.hubspot.com', 'emailforyou.net', 'emailgaul.com', 'emailgo.de', 'emailgroups.net', 'emailias.com', 'emailinfive.com', 'emailit.com', 'emaillime.com', 'emailmiser.com', 'emailoregon.com', 'emailpinoy.com', 'emailplanet.com', 'emailplus.org', 'emailproxsy.com', 'emails.ga', 'emails.incisivemedia.com', 'emails.ru', 'emailsensei.com', 'emailservice.com', 'emailsydney.com', 'emailtemporanea.com', 'emailtemporanea.net', 'emailtemporar.ro', 'emailtemporario.com.br', 'emailthe.net', 'emailtmp.com', 'emailto.de', 'emailuser.net', 'emailwarden.com', 'emailx.at.hm', 'emailx.net', 'emailxfer.com', 'emailz.ga', 'emailz.gq', 'emale.ru', 'ematic.com', 'embarqmail.com', 'emeil.in', 'emeil.ir', 'emil.com', 'eml.cc', 'eml.pp.ua', 'empereur.com', 'emptymail.com', 'emumail.com', 'emz.net', 'end-war.com', 'enel.net', 'enelpunto.net', 'engineer.com', 'england.com', 'england.edu', 'englandmail.com', 'enter.net', 'epage.ru', 'epatra.com', 'ephemail.net', 'epiqmail.com', 'epix.net', 'epomail.com', 'epost.de', 'eposta.hu', 'eprompter.com', 'eqqu.com', 'eramail.co.za', 'eresmas.com', 'eriga.lv', 'ero-tube.org', 'erols.com', 'eshche.net', 'esmailweb.net', 'estranet.it', 'ethos.st', 'etoast.com', 'etrademail.com', 'etranquil.com', 'etranquil.net', 'eudoramail.com', 'europamel.net', 'europe.com', 'europemail.com', 'euroseek.com', 'eurosport.com', 'evafan.com', 'evertonfans.com', 'every1.net', 'everyday.com.kh', 'everymail.net', 'everyone.net', 'everytg.ml', 'evopo.com', 'examnotes.net', 'excite.co.jp', 'excite.co.uk', 'excite.com', 'excite.it', 'execs.com', 'execs2k.com', 'executivemail.co.za', 'exemail.com.au', 'exg6.exghost.com', 'existiert.net', 'explodemail.com', 'express.net.ua', 'expressasia.com', 'extenda.net', 'extended.com', 'extremail.ru', 'eyepaste.com', 'eyou.com', 'ezagenda.com', 'ezcybersearch.com', 
'ezmail.egine.com', 'ezmail.ru', 'ezrs.com', 'ezy.net', 'f-m.fm', 'f1fans.net', 'facebook-email.ga', 'facebook.com', 'facebookmail.com', 'facebookmail.gq', 'fadrasha.net', 'fadrasha.org', 'fahr-zur-hoelle.org', 'fake-email.pp.ua', 'fake-mail.cf', 'fake-mail.ga', 'fake-mail.ml', 'fakeinbox.com', 'fakeinformation.com', 'fakemailz.com', 'falseaddress.com', 'fan.com', 'fan.theboys.com', 'fannclub.com', 'fansonlymail.com', 'fansworldwide.de', 'fantasticmail.com', 'fantasymail.de', 'farang.net', 'farifluset.mailexpire.com', 'faroweb.com', 'fast-email.com', 'fast-mail.fr', 'fast-mail.org', 'fastacura.com', 'fastchevy.com', 'fastchrysler.com', 'fastem.com', 'fastemail.us', 'fastemailer.com', 'fastemailextractor.net', 'fastermail.com', 'fastest.cc', 'fastimap.com', 'fastkawasaki.com', 'fastmail.ca', 'fastmail.cn', 'fastmail.co.uk', 'fastmail.com', 'fastmail.com.au', 'fastmail.es', 'fastmail.fm', 'fastmail.gr', 'fastmail.im', 'fastmail.in', 'fastmail.jp', 'fastmail.mx', 'fastmail.net', 'fastmail.nl', 'fastmail.se', 'fastmail.to', 'fastmail.tw', 'fastmail.us', 'fastmailbox.net', 'fastmazda.com', 'fastmessaging.com', 'fastmitsubishi.com', 'fastnissan.com', 'fastservice.com', 'fastsubaru.com', 'fastsuzuki.com', 'fasttoyota.com', 'fastyamaha.com', 'fatcock.net', 'fatflap.com', 'fathersrightsne.org', 'fatyachts.com', 'fax.ru', 'fbi-agent.com', 'fbi.hu', 'fcc.net', 'fdfdsfds.com', 'fea.st', 'federalcontractors.com', 'feinripptraeger.de', 'felicity.com', 'felicitymail.com', 'female.ru', 'femenino.com', 'fepg.net', 'fetchmail.co.uk', 'fetchmail.com', 'fettabernett.de', 'feyenoorder.com', 'ffanet.com', 'fiberia.com', 'fibertel.com.ar', 'ficken.de', 'fificorp.com', 'fificorp.net', 'fightallspam.com', 'filipinolinks.com', 'filzmail.com', 'financefan.net', 'financemail.net', 'financier.com', 'findfo.com', 'findhere.com', 'findmail.com', 'findmemail.com', 'finebody.com', 'fineemail.com', 'finfin.com', 'finklfan.com', 'fire-brigade.com', 'fireman.net', 'fishburne.org', 'fishfuse.com', 
'fivemail.de', 'fixmail.tk', 'fizmail.com', 'flashbox.5july.org', 'flashcom.net', 'flashemail.com', 'flashmail.com', 'flashmail.net', 'fleckens.hu', 'flipcode.com', 'floridaemail.net', 'flytecrew.com', 'fmail.co.uk', 'fmailbox.com', 'fmgirl.com', 'fmguy.com', 'fnbmail.co.za', 'fnmail.com', 'folkfan.com', 'foodmail.com', 'footard.com', 'football.theboys.com', 'footballmail.com', 'foothills.net', 'for-president.com', 'force9.co.uk', 'forfree.at', 'forgetmail.com', 'fornow.eu', 'forpresident.com', 'forthnet.gr', 'fortuncity.com', 'fortunecity.com', 'forum.dk', 'fossefans.com', 'foxmail.com', 'fr33mail.info', 'francefans.com', 'francemel.fr', 'frapmail.com', 'free-email.ga', 'free-online.net', 'free-org.com', 'free.com.pe', 'free.fr', 'freeaccess.nl', 'freeaccount.com', 'freeandsingle.com', 'freebox.com', 'freedom.usa.com', 'freedomlover.com', 'freefanmail.com', 'freegates.be', 'freeghana.com', 'freelance-france.eu', 'freeler.nl', 'freemail.bozz.com', 'freemail.c3.hu', 'freemail.com.au', 'freemail.com.pk', 'freemail.de', 'freemail.et', 'freemail.gr', 'freemail.hu', 'freemail.it', 'freemail.lt', 'freemail.ms', 'freemail.nl', 'freemail.org.mk', 'freemail.ru', 'freemails.ga', 'freemeil.gq', 'freenet.de', 'freenet.kg', 'freeola.com', 'freeola.net', 'freeproblem.com', 'freesbee.fr', 'freeserve.co.uk', 'freeservers.com', 'freestamp.com', 'freestart.hu', 'freesurf.fr', 'freesurf.nl', 'freeuk.com', 'freeuk.net', 'freeukisp.co.uk', 'freeweb.org', 'freewebemail.com', 'freeyellow.com', 'freezone.co.uk', 'fresnomail.com', 'freudenkinder.de', 'freundin.ru', 'friction.net', 'friendlydevices.com', 'friendlymail.co.uk', 'friends-cafe.com', 'friendsfan.com', 'from-africa.com', 'from-america.com', 'from-argentina.com', 'from-asia.com', 'from-australia.com', 'from-belgium.com', 'from-brazil.com', 'from-canada.com', 'from-china.net', 'from-england.com', 'from-europe.com', 'from-france.net', 'from-germany.net', 'from-holland.com', 'from-israel.com', 'from-italy.net', 'from-japan.net', 
'from-korea.com', 'from-mexico.com', 'from-outerspace.com', 'from-russia.com', 'from-spain.net', 'fromalabama.com', 'fromalaska.com', 'fromarizona.com', 'fromarkansas.com', 'fromcalifornia.com', 'fromcolorado.com', 'fromconnecticut.com', 'fromdelaware.com', 'fromflorida.net', 'fromgeorgia.com', 'fromhawaii.net', 'fromidaho.com', 'fromillinois.com', 'fromindiana.com', 'frominter.net', 'fromiowa.com', 'fromjupiter.com', 'fromkansas.com', 'fromkentucky.com', 'fromlouisiana.com', 'frommaine.net', 'frommaryland.com', 'frommassachusetts.com', 'frommiami.com', 'frommichigan.com', 'fromminnesota.com', 'frommississippi.com', 'frommissouri.com', 'frommontana.com', 'fromnebraska.com', 'fromnevada.com', 'fromnewhampshire.com', 'fromnewjersey.com', 'fromnewmexico.com', 'fromnewyork.net', 'fromnorthcarolina.com', 'fromnorthdakota.com', 'fromohio.com', 'fromoklahoma.com', 'fromoregon.net', 'frompennsylvania.com', 'fromrhodeisland.com', 'fromru.com', 'fromru.ru', 'fromsouthcarolina.com', 'fromsouthdakota.com', 'fromtennessee.com', 'fromtexas.com', 'fromthestates.com', 'fromutah.com', 'fromvermont.com', 'fromvirginia.com', 'fromwashington.com', 'fromwashingtondc.com', 'fromwestvirginia.com', 'fromwisconsin.com', 'fromwyoming.com', 'front.ru', 'frontier.com', 'frontiernet.net', 'frostbyte.uk.net', 'fsmail.net', 'ftc-i.net', 'ftml.net', 'fuckingduh.com', 'fudgerub.com', 'fullchannel.net', 'fullmail.com', 'funiran.com', 'funkfan.com', 'funky4.com', 'fuorissimo.com', 'furnitureprovider.com', 'fuse.net', 'fusemail.com', 'fut.es', 'fux0ringduh.com', 'fwnb.com', 'fxsmails.com', 'fyii.de', 'galamb.net', 'galaxy5.com', 'galaxyhit.com', 'gamebox.com', 'gamebox.net', 'gamegeek.com', 'games.com', 'gamespotmail.com', 'gamil.com', 'gamil.com.au', 'gamno.config.work', 'garbage.com', 'gardener.com', 'garliclife.com', 'gateway.net', 'gatwickemail.com', 'gawab.com', 'gay.com', 'gaybrighton.co.uk', 'gaza.net', 'gazeta.pl', 'gazibooks.com', 'gci.net', 'gdi.net', 'gee-wiz.com', 'geecities.com', 
'geek.com', 'geek.hu', 'geeklife.com', 'gehensiemirnichtaufdensack.de', 'gelitik.in', 'gencmail.com', 'general-hospital.com', 'gentlemansclub.de', 'genxemail.com', 'geocities.com', 'geography.net', 'geologist.com', 'geopia.com', 'germanymail.com', 'get.pp.ua', 'get1mail.com', 'get2mail.fr', 'getairmail.cf', 'getairmail.com', 'getairmail.ga', 'getairmail.gq', 'getmails.eu', 'getonemail.com', 'getonemail.net', 'gfxartist.ru', 'gh2000.com', 'ghanamail.com', 'ghostmail.com', 'ghosttexter.de', 'giantmail.de', 'giantsfan.com', 'giga4u.de', 'gigileung.org', 'girl4god.com', 'girlsundertheinfluence.com', 'gishpuppy.com', 'givepeaceachance.com', 'glay.org', 'glendale.net', 'globalfree.it', 'globalpagan.com', 'globalsite.com.br', 'globetrotter.net', 'globo.com', 'globomail.com', 'gmail.co.za', 'gmail.com', 'gmail.com.au', 'gmail.com.br', 'gmail.ru', 'gmial.com', 'gmx.at', 'gmx.ch', 'gmx.co.uk', 'gmx.com', 'gmx.de', 'gmx.fr', 'gmx.li', 'gmx.net', 'gmx.us', 'gnwmail.com', 'go.com', 'go.ro', 'go.ru', 'go2.com.py', 'go2net.com', 'go4.it', 'gobrainstorm.net', 'gocollege.com', 'gocubs.com', 'godmail.dk', 'goemailgo.com', 'gofree.co.uk', 'gol.com', 'goldenmail.ru', 'goldmail.ru', 'goldtoolbox.com', 'golfemail.com', 'golfilla.info', 'golfmail.be', 'gonavy.net', 'gonuts4free.com', 'goodnewsmail.com', 'goodstick.com', 'google.com', 'googlegroups.com', 'googlemail.com', 'goosemoose.com', 'goplay.com', 'gorillaswithdirtyarmpits.com', 'gorontalo.net', 'gospelfan.com', 'gothere.uk.com', 'gotmail.com', 'gotmail.net', 'gotmail.org', 'gotomy.com', 'gotti.otherinbox.com', 'govolsfan.com', 'gportal.hu', 'grabmail.com', 'graduate.org', 'graffiti.net', 'gramszu.net', 'grandmamail.com', 'grandmasmail.com', 'graphic-designer.com', 'grapplers.com', 'gratisweb.com', 'great-host.in', 'greenmail.net', 'greensloth.com', 'groupmail.com', 'grr.la', 'grungecafe.com', 'gsrv.co.uk', 'gtemail.net', 'gti.net', 'gtmc.net', 'gua.net', 'guerillamail.biz', 'guerillamail.com', 'guerrillamail.biz', 
'guerrillamail.com', 'guerrillamail.de', 'guerrillamail.info', 'guerrillamail.net', 'guerrillamail.org', 'guerrillamailblock.com', 'guessmail.com', 'guju.net', 'gurlmail.com', 'gustr.com', 'guy.com', 'guy2.com', 'guyanafriends.com', 'gwhsgeckos.com', 'gyorsposta.com', 'gyorsposta.hu', 'h-mail.us', 'hab-verschlafen.de', 'hablas.com', 'habmalnefrage.de', 'hacccc.com', 'hackermail.com', 'hackermail.net', 'hailmail.net', 'hairdresser.com', 'hairdresser.net', 'haltospam.com', 'hamptonroads.com', 'handbag.com', 'handleit.com', 'hang-ten.com', 'hangglidemail.com', 'hanmail.net', 'happemail.com', 'happycounsel.com', 'happypuppy.com', 'harakirimail.com', 'haramamba.ru', 'hardcorefreak.com', 'hardyoungbabes.com', 'hartbot.de', 'hat-geld.de', 'hatespam.org', 'hawaii.rr.com', 'hawaiiantel.net', 'headbone.com', 'healthemail.net', 'heartthrob.com', 'heavynoize.net', 'heerschap.com', 'heesun.net', 'hehe.com', 'hello.hu', 'hello.net.au', 'hello.to', 'hellokitty.com', 'helter-skelter.com', 'hempseed.com', 'herediano.com', 'heremail.com', 'herono1.com', 'herp.in', 'herr-der-mails.de', 'hetnet.nl', 'hewgen.ru', 'hey.to', 'hhdevel.com', 'hideakifan.com', 'hidemail.de', 'hidzz.com', 'highmilton.com', 'highquality.com', 'highveldmail.co.za', 'hilarious.com', 'hinduhome.com', 'hingis.org', 'hiphopfan.com', 'hispavista.com', 'hitmail.com', 'hitmanrecords.com', 'hitthe.net', 'hkg.net', 'hkstarphoto.com', 'hmamail.com', 'hochsitze.com', 'hockeymail.com', 'hollywoodkids.com', 'home-email.com', 'home.de', 'home.nl', 'home.no.net', 'home.ro', 'home.se', 'homeart.com', 'homelocator.com', 'homemail.com', 'homenetmail.com', 'homeonthethrone.com', 'homestead.com', 'homeworkcentral.com', 'honduras.com', 'hongkong.com', 'hookup.net', 'hoopsmail.com', 'hopemail.biz', 'horrormail.com', 'host-it.com.sg', 'hot-mail.gq', 'hot-shop.com', 'hot-shot.com', 'hot.ee', 'hotbot.com', 'hotbox.ru', 'hotbrev.com', 'hotcoolmail.com', 'hotepmail.com', 'hotfire.net', 'hotletter.com', 'hotlinemail.com', 'hotmail.be', 
'hotmail.ca', 'hotmail.ch', 'hotmail.co', 'hotmail.co.il', 'hotmail.co.jp', 'hotmail.co.nz', 'hotmail.co.uk', 'hotmail.co.za', 'hotmail.com', 'hotmail.com.ar', 'hotmail.com.au', 'hotmail.com.br', 'hotmail.com.mx', 'hotmail.com.tr', 'hotmail.de', 'hotmail.es', 'hotmail.fi', 'hotmail.fr', 'hotmail.it', 'hotmail.kg', 'hotmail.kz', 'hotmail.my', 'hotmail.nl', 'hotmail.ro', 'hotmail.ru', 'hotpop.com', 'hotpop3.com', 'hotvoice.com', 'housefan.com', 'housemail.com', 'hsuchi.net', 'html.tou.com', 'hu2.ru', 'hughes.net', 'hulapla.de', 'humanoid.net', 'humanux.com', 'humn.ws.gy', 'humour.com', 'hunsa.com', 'hurting.com', 'hush.com', 'hushmail.com', 'hypernautica.com', 'i-connect.com', 'i-france.com', 'i-love-cats.com', 'i-mail.com.au', 'i-mailbox.net', 'i-p.com', 'i-plus.net', 'i.am', 'i.am.to', 'i.amhey.to', 'i.ua', 'i12.com', 'i2828.com', 'i2pmail.org', 'iam4msu.com', 'iamawoman.com', 'iamfinallyonline.com', 'iamwaiting.com', 'iamwasted.com', 'iamyours.com', 'ibm.net', 'icestorm.com', 'ich-bin-verrueckt-nach-dir.de', 'ich-will-net.de', 'icloud.com', 'icmsconsultants.com', 'icq.com', 'icqmail.com', 'icrazy.com', 'icu.md', 'id-base.com', 'id.ru', 'ididitmyway.com', 'idigjesus.com', 'idirect.com', 'ieatspam.eu', 'ieatspam.info', 'ieh-mail.de', 'iespana.es', 'ifoward.com', 'ig.com.br', 'ignazio.it', 'ignmail.com', 'ihateclowns.com', 'ihateyoualot.info', 'iheartspam.org', 'ihwy.com', 'iinet.net.au', 'ijustdontcare.com', 'ikbenspamvrij.nl', 'ilkposta.com', 'ilovechocolate.com', 'ilovegiraffes.net', 'ilovejesus.com', 'ilovelionking.com', 'ilovepokemonmail.com', 'ilovethemovies.com', 'ilovetocollect.net', 'ilse.nl', 'imaginemail.com', 'imail.org', 'imail.ru', 'imailbox.com', 'imails.info', 'imap-mail.com', 'imap.cc', 'imapmail.org', 'imel.org', 'imgof.com', 'imgv.de', 'immo-gerance.info', 'imneverwrong.com', 'imposter.co.uk', 'imstations.com', 'imstressed.com', 'imtoosexy.com', 'in-box.net', 'in2jesus.com', 'iname.com', 'inbax.tk', 'inbound.plus', 
'inbox.com', 'inbox.lv', 'inbox.net', 'inbox.ru', 'inbox.si', 'inboxalias.com', 'inboxclean.com', 'inboxclean.org', 'incamail.com', 'includingarabia.com', 'incredimail.com', 'indeedemail.com', 'index.ua', 'indexa.fr', 'india.com', 'indiatimes.com', 'indo-mail.com', 'indocities.com', 'indomail.com', 'indosat.net.id', 'indus.ru', 'indyracers.com', 'inerted.com', 'inet.com', 'inet.net.au', 'info-media.de', 'info-radio.ml', 'info.com', 'info66.com', 'infoapex.com', 'infocom.zp.ua', 'infohq.com', 'infomail.es', 'infomart.or.jp', 'informaticos.com', 'infospacemail.com', 'infovia.com.ar', 'inicia.es', 'inmail.sk', 'inmail24.com', 'inmano.com', 'inmynetwork.tk', 'innocent.com', 'inonesearch.com', 'inorbit.com', 'inoutbox.com', 'insidebaltimore.net', 'insight.rr.com', 'inspectorjavert.com', 'instant-mail.de', 'instantemailaddress.com', 'instantmail.fr', 'instruction.com', 'instructor.net', 'insurer.com', 'integra.net', 'interaccess.com', 'interburp.com', 'interfree.it', 'interia.pl', 'interlap.com.ar', 'intermail.co.il', 'internet-club.com', 'internet-e-mail.com', 'internet-mail.org', 'internet-police.com', 'internetbiz.com', 'internetdrive.com', 'internetegypt.com', 'internetemails.net', 'internetmailing.net', 'internetmci.com', 'internode.on.net', 'invalid.com', 'investormail.com', 'inwind.it', 'iobox.com', 'iobox.fi', 'iol.it', 'iol.pt', 'ionet.net', 'iowaemail.com', 'ip3.com', 'ip4.pp.ua', 'ip6.li', 'ip6.pp.ua', 'ipdeer.com', 'ipex.ru', 'ipoo.org', 'iportalexpress.com', 'iprimus.com.au', 'iqemail.com', 'irangate.net', 'iraqmail.com', 'ireland.com', 'irelandmail.com', 'iremail.de', 'irish2me.com', 'irj.hu', 'iroid.com', 'iscooler.com', 'isellcars.com', 'iservejesus.com', 'islamonline.net', 'islandemail.net', 'isleuthmail.com', 'ismart.net', 'isonfire.com', 'isp9.net', 'israelmail.com', 'ist-allein.info', 'ist-einmalig.de', 'ist-ganz-allein.de', 'ist-willig.de', 'italymail.com', 'itelefonica.com.br', 'itloox.com', 'itmom.com', 'itol.com', 'ivebeenframed.com', 
'ivillage.com', 'iwan-fals.com', 'iwi.net', 'iwmail.com', 'iwon.com', 'izadpanah.com', 'jabble.com', 'jahoopa.com', 'jakuza.hu', 'japan.com', 'jaydemail.com', 'jazzandjava.com', 'jazzfan.com', 'jazzgame.com', 'je-recycle.info', 'jeanvaljean.com', 'jerusalemmail.com', 'jesusanswers.com', 'jet-renovation.fr', 'jetable.com', 'jetable.de', 'jetable.fr.nf', 'jetable.net', 'jetable.org', 'jetable.pp.ua', 'jetemail.net', 'jewishmail.com', 'jfkislanders.com', 'jingjo.net', 'jippii.fi', 'jmail.co.za', 'jnxjn.com', 'job4u.com', 'jobbikszimpatizans.hu', 'joelonsoftware.com', 'joinme.com', 'jojomail.com', 'jokes.com', 'jordanmail.com', 'journalist.com', 'jourrapide.com', 'jovem.te.pt', 'joymail.com', 'jpopmail.com', 'jsrsolutions.com', 'jubiimail.dk', 'jump.com', 'jumpy.it', 'juniormail.com', 'junk1e.com', 'junkmail.com', 'junkmail.gq', 'juno.com', 'justemail.net', 'justicemail.com', 'justmail.de', 'justmailz.com', 'justmarriedmail.com', 'jwspamspy', 'k.ro', 'kaazoo.com', 'kabissa.org', 'kaduku.net', 'kaffeeschluerfer.com', 'kaffeeschluerfer.de', 'kaixo.com', 'kalpoint.com', 'kansascity.com', 'kapoorweb.com', 'karachian.com', 'karachioye.com', 'karbasi.com', 'kasmail.com', 'kaspop.com', 'katamail.com', 'kayafmmail.co.za', 'kbjrmail.com', 'kcks.com', 'kebi.com', 'keftamail.com', 'keg-party.com', 'keinpardon.de', 'keko.com.ar', 'kellychen.com', 'keptprivate.com', 'keromail.com', 'kewpee.com', 'keyemail.com', 'kgb.hu', 'khosropour.com', 'kichimail.com', 'kickassmail.com', 'killamail.com', 'killergreenmail.com', 'killermail.com', 'killmail.com', 'killmail.net', 'kimo.com', 'kimsdisk.com', 'kinglibrary.net', 'kinki-kids.com', 'kismail.ru', 'kissfans.com', 'kitemail.com', 'kittymail.com', 'kitznet.at', 'kiwibox.com', 'kiwitown.com', 'klassmaster.com', 'klassmaster.net', 'klzlk.com', 'km.ru', 'kmail.com.au', 'knol-power.nl', 'koko.com', 'kolumbus.fi', 'kommespaeter.de', 'konkovo.net', 'konsul.ru', 'konx.com', 'korea.com', 'koreamail.com', 'kosino.net', 'koszmail.pl', 'kozmail.com', 
'kpnmail.nl', 'kreditor.ru', 'krim.ws', 'krongthip.com', 'krovatka.net', 'krunis.com', 'ksanmail.com', 'ksee24mail.com', 'kube93mail.com', 'kukamail.com', 'kulturbetrieb.info', 'kumarweb.com', 'kurzepost.de', 'kuwait-mail.com', 'kuzminki.net', 'kyokodate.com', 'kyokofukada.net', 'l33r.eu', 'la.com', 'labetteraverouge.at', 'lackmail.ru', 'ladyfire.com', 'ladymail.cz', 'lagerlouts.com', 'lags.us', 'lahoreoye.com', 'lakmail.com', 'lamer.hu', 'land.ru', 'langoo.com', 'lankamail.com', 'laoeq.com', 'laposte.net', 'lass-es-geschehen.de', 'last-chance.pro', 'lastmail.co', 'latemodels.com', 'latinmail.com', 'latino.com', 'lavabit.com', 'lavache.com', 'law.com', 'lawlita.com', 'lawyer.com', 'lazyinbox.com', 'learn2compute.net', 'lebanonatlas.com', 'leeching.net', 'leehom.net', 'lefortovo.net', 'legalactions.com', 'legalrc.loan', 'legislator.com', 'legistrator.com', 'lenta.ru', 'leonlai.net', 'letsgomets.net', 'letterbox.com', 'letterboxes.org', 'letthemeatspam.com', 'levele.com', 'levele.hu', 'lex.bg', 'lexis-nexis-mail.com', 'lhsdv.com', 'lianozovo.net', 'libero.it', 'liberomail.com', 'lick101.com', 'liebt-dich.info', 'lifebyfood.com', 'link2mail.net', 'linkmaster.com', 'linktrader.com', 'linuxfreemail.com', 'linuxmail.org', 'lionsfan.com.au', 'liontrucks.com', 'liquidinformation.net', 'lissamail.com', 'list.ru', 'listomail.com', 'litedrop.com', 'literaturelover.com', 'littleapple.com', 'littleblueroom.com', 'live.at', 'live.be', 'live.ca', 'live.cl', 'live.cn', 'live.co.uk', 'live.co.za', 'live.com', 'live.com.ar', 'live.com.au', 'live.com.mx', 'live.com.my', 'live.com.pt', 'live.com.sg', 'live.de', 'live.dk', 'live.fr', 'live.hk', 'live.ie', 'live.in', 'live.it', 'live.jp', 'live.nl', 'live.no', 'live.ru', 'live.se', 'liveradio.tk', 'liverpoolfans.com', 'ljiljan.com', 'llandudno.com', 'llangollen.com', 'lmxmail.sk', 'lobbyist.com', 'localbar.com', 'localgenius.com', 'locos.com', 'login-email.ga', 'loh.pp.ua', 'lol.ovpn.to', 'lolfreak.net', 'lolito.tk', 'lolnetwork.net', 
'london.com', 'loobie.com', 'looksmart.co.uk', 'looksmart.com', 'looksmart.com.au', 'lookugly.com', 'lopezclub.com', 'lortemail.dk', 'louiskoo.com', 'lov.ru', 'love.com', 'love.cz', 'loveable.com', 'lovecat.com', 'lovefall.ml', 'lovefootball.com', 'loveforlostcats.com', 'lovelygirl.net', 'lovemail.com', 'lover-boy.com', 'lovergirl.com', 'lovesea.gq', 'lovethebroncos.com', 'lovethecowboys.com', 'lovetocook.net', 'lovetohike.com', 'loveyouforever.de', 'lovingjesus.com', 'lowandslow.com', 'lr7.us', 'lr78.com', 'lroid.com', 'lubovnik.ru', 'lukop.dk', 'luso.pt', 'luukku.com', 'luv2.us', 'luvrhino.com', 'lvie.com.sg', 'lvwebmail.com', 'lycos.co.uk', 'lycos.com', 'lycos.es', 'lycos.it', 'lycos.ne.jp', 'lycos.ru', 'lycosemail.com', 'lycosmail.com', 'm-a-i-l.com', 'm-hmail.com', 'm21.cc', 'm4.org', 'm4ilweb.info', 'mac.com', 'macbox.com', 'macbox.ru', 'macfreak.com', 'machinecandy.com', 'macmail.com', 'mad.scientist.com', 'madcrazy.com', 'madcreations.com', 'madonnafan.com', 'madrid.com', 'maennerversteherin.com', 'maennerversteherin.de', 'maffia.hu', 'magicmail.co.za', 'magspam.net', 'mahmoodweb.com', 'mail-awu.de', 'mail-box.cz', 'mail-center.com', 'mail-central.com', 'mail-easy.fr', 'mail-filter.com', 'mail-me.com', 'mail-page.com', 'mail-temporaire.fr', 'mail-tester.com', 'mail.austria.com', 'mail.az', 'mail.be', 'mail.bg', 'mail.bulgaria.com', 'mail.by', 'mail.byte.it', 'mail.co.za', 'mail.com', 'mail.com.tr', 'mail.de', 'mail.ee', 'mail.entrepeneurmag.com', 'mail.freetown.com', 'mail.gr', 'mail.hitthebeach.com', 'mail.htl22.at', 'mail.kmsp.com', 'mail.md', 'mail.mezimages.net', 'mail.misterpinball.de', 'mail.nu', 'mail.org.uk', 'mail.pf', 'mail.pharmacy.com', 'mail.pt', 'mail.r-o-o-t.com', 'mail.ru', 'mail.salu.net', 'mail.sisna.com', 'mail.spaceports.com', 'mail.svenz.eu', 'mail.theboys.com', 'mail.usa.com', 'mail.vasarhely.hu', 'mail.vu', 'mail.wtf', 'mail.zp.ua', 'mail114.net', 'mail15.com', 'mail1a.de', 'mail1st.com', 'mail2007.com', 'mail21.cc', 'mail2aaron.com', 
'mail2abby.com', 'mail2abc.com', 'mail2actor.com', 'mail2admiral.com', 'mail2adorable.com', 'mail2adoration.com', 'mail2adore.com', 'mail2adventure.com', 'mail2aeolus.com', 'mail2aether.com', 'mail2affection.com', 'mail2afghanistan.com', 'mail2africa.com', 'mail2agent.com', 'mail2aha.com', 'mail2ahoy.com', 'mail2aim.com', 'mail2air.com', 'mail2airbag.com', 'mail2airforce.com', 'mail2airport.com', 'mail2alabama.com', 'mail2alan.com', 'mail2alaska.com', 'mail2albania.com', 'mail2alcoholic.com', 'mail2alec.com', 'mail2alexa.com', 'mail2algeria.com', 'mail2alicia.com', 'mail2alien.com', 'mail2allan.com', 'mail2allen.com', 'mail2allison.com', 'mail2alpha.com', 'mail2alyssa.com', 'mail2amanda.com', 'mail2amazing.com', 'mail2amber.com', 'mail2america.com', 'mail2american.com', 'mail2andorra.com', 'mail2andrea.com', 'mail2andy.com', 'mail2anesthesiologist.com', 'mail2angela.com', 'mail2angola.com', 'mail2ann.com', 'mail2anna.com', 'mail2anne.com', 'mail2anthony.com', 'mail2anything.com', 'mail2aphrodite.com', 'mail2apollo.com', 'mail2april.com', 'mail2aquarius.com', 'mail2arabia.com', 'mail2arabic.com', 'mail2architect.com', 'mail2ares.com', 'mail2argentina.com', 'mail2aries.com', 'mail2arizona.com', 'mail2arkansas.com', 'mail2armenia.com', 'mail2army.com', 'mail2arnold.com', 'mail2art.com', 'mail2artemus.com', 'mail2arthur.com', 'mail2artist.com', 'mail2ashley.com', 'mail2ask.com', 'mail2astronomer.com', 'mail2athena.com', 'mail2athlete.com', 'mail2atlas.com', 'mail2atom.com', 'mail2attitude.com', 'mail2auction.com', 'mail2aunt.com', 'mail2australia.com', 'mail2austria.com', 'mail2azerbaijan.com', 'mail2baby.com', 'mail2bahamas.com', 'mail2bahrain.com', 'mail2ballerina.com', 'mail2ballplayer.com', 'mail2band.com', 'mail2bangladesh.com', 'mail2bank.com', 'mail2banker.com', 'mail2bankrupt.com', 'mail2baptist.com', 'mail2bar.com', 'mail2barbados.com', 'mail2barbara.com', 'mail2barter.com', 'mail2basketball.com', 'mail2batter.com', 'mail2beach.com', 'mail2beast.com', 
'mail2beatles.com', 'mail2beauty.com', 'mail2becky.com', 'mail2beijing.com', 'mail2belgium.com', 'mail2belize.com', 'mail2ben.com', 'mail2bernard.com', 'mail2beth.com', 'mail2betty.com', 'mail2beverly.com', 'mail2beyond.com', 'mail2biker.com', 'mail2bill.com', 'mail2billionaire.com', 'mail2billy.com', 'mail2bio.com', 'mail2biologist.com', 'mail2black.com', 'mail2blackbelt.com', 'mail2blake.com', 'mail2blind.com', 'mail2blonde.com', 'mail2blues.com', 'mail2bob.com', 'mail2bobby.com', 'mail2bolivia.com', 'mail2bombay.com', 'mail2bonn.com', 'mail2bookmark.com', 'mail2boreas.com', 'mail2bosnia.com', 'mail2boston.com', 'mail2botswana.com', 'mail2bradley.com', 'mail2brazil.com', 'mail2breakfast.com', 'mail2brian.com', 'mail2bride.com', 'mail2brittany.com', 'mail2broker.com', 'mail2brook.com', 'mail2bruce.com', 'mail2brunei.com', 'mail2brunette.com', 'mail2brussels.com', 'mail2bryan.com', 'mail2bug.com', 'mail2bulgaria.com', 'mail2business.com', 'mail2buy.com', 'mail2ca.com', 'mail2california.com', 'mail2calvin.com', 'mail2cambodia.com', 'mail2cameroon.com', 'mail2canada.com', 'mail2cancer.com', 'mail2capeverde.com', 'mail2capricorn.com', 'mail2cardinal.com', 'mail2cardiologist.com', 'mail2care.com', 'mail2caroline.com', 'mail2carolyn.com', 'mail2casey.com', 'mail2cat.com', 'mail2caterer.com', 'mail2cathy.com', 'mail2catlover.com', 'mail2catwalk.com', 'mail2cell.com', 'mail2chad.com', 'mail2champaign.com', 'mail2charles.com', 'mail2chef.com', 'mail2chemist.com', 'mail2cherry.com', 'mail2chicago.com', 'mail2chile.com', 'mail2china.com', 'mail2chinese.com', 'mail2chocolate.com', 'mail2christian.com', 'mail2christie.com', 'mail2christmas.com', 'mail2christy.com', 'mail2chuck.com', 'mail2cindy.com', 'mail2clark.com', 'mail2classifieds.com', 'mail2claude.com', 'mail2cliff.com', 'mail2clinic.com', 'mail2clint.com', 'mail2close.com', 'mail2club.com', 'mail2coach.com', 'mail2coastguard.com', 'mail2colin.com', 'mail2college.com', 'mail2colombia.com', 'mail2color.com', 
'mail2colorado.com', 'mail2columbia.com', 'mail2comedian.com', 'mail2composer.com', 'mail2computer.com', 'mail2computers.com', 'mail2concert.com', 'mail2congo.com', 'mail2connect.com', 'mail2connecticut.com', 'mail2consultant.com', 'mail2convict.com', 'mail2cook.com', 'mail2cool.com', 'mail2cory.com', 'mail2costarica.com', 'mail2country.com', 'mail2courtney.com', 'mail2cowboy.com', 'mail2cowgirl.com', 'mail2craig.com', 'mail2crave.com', 'mail2crazy.com', 'mail2create.com', 'mail2croatia.com', 'mail2cry.com', 'mail2crystal.com', 'mail2cuba.com', 'mail2culture.com', 'mail2curt.com', 'mail2customs.com', 'mail2cute.com', 'mail2cutey.com', 'mail2cynthia.com', 'mail2cyprus.com', 'mail2czechrepublic.com', 'mail2dad.com', 'mail2dale.com', 'mail2dallas.com', 'mail2dan.com', 'mail2dana.com', 'mail2dance.com', 'mail2dancer.com', 'mail2danielle.com', 'mail2danny.com', 'mail2darlene.com', 'mail2darling.com', 'mail2darren.com', 'mail2daughter.com', 'mail2dave.com', 'mail2dawn.com', 'mail2dc.com', 'mail2dealer.com', 'mail2deanna.com', 'mail2dearest.com', 'mail2debbie.com', 'mail2debby.com', 'mail2deer.com', 'mail2delaware.com', 'mail2delicious.com', 'mail2demeter.com', 'mail2democrat.com', 'mail2denise.com', 'mail2denmark.com', 'mail2dennis.com', 'mail2dentist.com', 'mail2derek.com', 'mail2desert.com', 'mail2devoted.com', 'mail2devotion.com', 'mail2diamond.com', 'mail2diana.com', 'mail2diane.com', 'mail2diehard.com', 'mail2dilemma.com', 'mail2dillon.com', 'mail2dinner.com', 'mail2dinosaur.com', 'mail2dionysos.com', 'mail2diplomat.com', 'mail2director.com', 'mail2dirk.com', 'mail2disco.com', 'mail2dive.com', 'mail2diver.com', 'mail2divorced.com', 'mail2djibouti.com', 'mail2doctor.com', 'mail2doglover.com', 'mail2dominic.com', 'mail2dominica.com', 'mail2dominicanrepublic.com', 'mail2don.com', 'mail2donald.com', 'mail2donna.com', 'mail2doris.com', 'mail2dorothy.com', 'mail2doug.com', 'mail2dough.com', 'mail2douglas.com', 'mail2dow.com', 'mail2downtown.com', 'mail2dream.com', 
'mail2dreamer.com', 'mail2dude.com', 'mail2dustin.com', 'mail2dyke.com', 'mail2dylan.com', 'mail2earl.com', 'mail2earth.com', 'mail2eastend.com', 'mail2eat.com', 'mail2economist.com', 'mail2ecuador.com', 'mail2eddie.com', 'mail2edgar.com', 'mail2edwin.com', 'mail2egypt.com', 'mail2electron.com', 'mail2eli.com', 'mail2elizabeth.com', 'mail2ellen.com', 'mail2elliot.com', 'mail2elsalvador.com', 'mail2elvis.com', 'mail2emergency.com', 'mail2emily.com', 'mail2engineer.com', 'mail2english.com', 'mail2environmentalist.com', 'mail2eos.com', 'mail2eric.com', 'mail2erica.com', 'mail2erin.com', 'mail2erinyes.com', 'mail2eris.com', 'mail2eritrea.com', 'mail2ernie.com', 'mail2eros.com', 'mail2estonia.com', 'mail2ethan.com', 'mail2ethiopia.com', 'mail2eu.com', 'mail2europe.com', 'mail2eurus.com', 'mail2eva.com', 'mail2evan.com', 'mail2evelyn.com', 'mail2everything.com', 'mail2exciting.com', 'mail2expert.com', 'mail2fairy.com', 'mail2faith.com', 'mail2fanatic.com', 'mail2fancy.com', 'mail2fantasy.com', 'mail2farm.com', 'mail2farmer.com', 'mail2fashion.com', 'mail2fat.com', 'mail2feeling.com', 'mail2female.com', 'mail2fever.com', 'mail2fighter.com', 'mail2fiji.com', 'mail2filmfestival.com', 'mail2films.com', 'mail2finance.com', 'mail2finland.com', 'mail2fireman.com', 'mail2firm.com', 'mail2fisherman.com', 'mail2flexible.com', 'mail2florence.com', 'mail2florida.com', 'mail2floyd.com', 'mail2fly.com', 'mail2fond.com', 'mail2fondness.com', 'mail2football.com', 'mail2footballfan.com', 'mail2found.com', 'mail2france.com', 'mail2frank.com', 'mail2frankfurt.com', 'mail2franklin.com', 'mail2fred.com', 'mail2freddie.com', 'mail2free.com', 'mail2freedom.com', 'mail2french.com', 'mail2freudian.com', 'mail2friendship.com', 'mail2from.com', 'mail2fun.com', 'mail2gabon.com', 'mail2gabriel.com', 'mail2gail.com', 'mail2galaxy.com', 'mail2gambia.com', 'mail2games.com', 'mail2gary.com', 'mail2gavin.com', 'mail2gemini.com', 'mail2gene.com', 'mail2genes.com', 'mail2geneva.com', 'mail2george.com', 
'mail2georgia.com', 'mail2gerald.com', 'mail2german.com', 'mail2germany.com', 'mail2ghana.com', 'mail2gilbert.com', 'mail2gina.com', 'mail2girl.com', 'mail2glen.com', 'mail2gloria.com', 'mail2goddess.com', 'mail2gold.com', 'mail2golfclub.com', 'mail2golfer.com', 'mail2gordon.com', 'mail2government.com', 'mail2grab.com', 'mail2grace.com', 'mail2graham.com', 'mail2grandma.com', 'mail2grandpa.com', 'mail2grant.com', 'mail2greece.com', 'mail2green.com', 'mail2greg.com', 'mail2grenada.com', 'mail2gsm.com', 'mail2guard.com', 'mail2guatemala.com', 'mail2guy.com', 'mail2hades.com', 'mail2haiti.com', 'mail2hal.com', 'mail2handhelds.com', 'mail2hank.com', 'mail2hannah.com', 'mail2harold.com', 'mail2harry.com', 'mail2hawaii.com', 'mail2headhunter.com', 'mail2heal.com', 'mail2heather.com', 'mail2heaven.com', 'mail2hebe.com', 'mail2hecate.com', 'mail2heidi.com', 'mail2helen.com', 'mail2hell.com', 'mail2help.com', 'mail2helpdesk.com', 'mail2henry.com', 'mail2hephaestus.com', 'mail2hera.com', 'mail2hercules.com', 'mail2herman.com', 'mail2hermes.com', 'mail2hespera.com', 'mail2hestia.com', 'mail2highschool.com', 'mail2hindu.com', 'mail2hip.com', 'mail2hiphop.com', 'mail2holland.com', 'mail2holly.com', 'mail2hollywood.com', 'mail2homer.com', 'mail2honduras.com', 'mail2honey.com', 'mail2hongkong.com', 'mail2hope.com', 'mail2horse.com', 'mail2hot.com', 'mail2hotel.com', 'mail2houston.com', 'mail2howard.com', 'mail2hugh.com', 'mail2human.com', 'mail2hungary.com', 'mail2hungry.com', 'mail2hygeia.com', 'mail2hyperspace.com', 'mail2hypnos.com', 'mail2ian.com', 'mail2ice-cream.com', 'mail2iceland.com', 'mail2idaho.com', 'mail2idontknow.com', 'mail2illinois.com', 'mail2imam.com', 'mail2in.com', 'mail2india.com', 'mail2indian.com', 'mail2indiana.com', 'mail2indonesia.com', 'mail2infinity.com', 'mail2intense.com', 'mail2iowa.com', 'mail2iran.com', 'mail2iraq.com', 'mail2ireland.com', 'mail2irene.com', 'mail2iris.com', 'mail2irresistible.com', 'mail2irving.com', 'mail2irwin.com', 
'mail2isaac.com', 'mail2israel.com', 'mail2italian.com', 'mail2italy.com', 'mail2jackie.com', 'mail2jacob.com', 'mail2jail.com', 'mail2jaime.com', 'mail2jake.com', 'mail2jamaica.com', 'mail2james.com', 'mail2jamie.com', 'mail2jan.com', 'mail2jane.com', 'mail2janet.com', 'mail2janice.com', 'mail2japan.com', 'mail2japanese.com', 'mail2jasmine.com', 'mail2jason.com', 'mail2java.com', 'mail2jay.com', 'mail2jazz.com', 'mail2jed.com', 'mail2jeffrey.com', 'mail2jennifer.com', 'mail2jenny.com', 'mail2jeremy.com', 'mail2jerry.com', 'mail2jessica.com', 'mail2jessie.com', 'mail2jesus.com', 'mail2jew.com', 'mail2jeweler.com', 'mail2jim.com', 'mail2jimmy.com', 'mail2joan.com', 'mail2joann.com', 'mail2joanna.com', 'mail2jody.com', 'mail2joe.com', 'mail2joel.com', 'mail2joey.com', 'mail2john.com', 'mail2join.com', 'mail2jon.com', 'mail2jonathan.com', 'mail2jones.com', 'mail2jordan.com', 'mail2joseph.com', 'mail2josh.com', 'mail2joy.com', 'mail2juan.com', 'mail2judge.com', 'mail2judy.com', 'mail2juggler.com', 'mail2julian.com', 'mail2julie.com', 'mail2jumbo.com', 'mail2junk.com', 'mail2justin.com', 'mail2justme.com', 'mail2k.ru', 'mail2kansas.com', 'mail2karate.com', 'mail2karen.com', 'mail2karl.com', 'mail2karma.com', 'mail2kathleen.com', 'mail2kathy.com', 'mail2katie.com', 'mail2kay.com', 'mail2kazakhstan.com', 'mail2keen.com', 'mail2keith.com', 'mail2kelly.com', 'mail2kelsey.com', 'mail2ken.com', 'mail2kendall.com', 'mail2kennedy.com', 'mail2kenneth.com', 'mail2kenny.com', 'mail2kentucky.com', 'mail2kenya.com', 'mail2kerry.com', 'mail2kevin.com', 'mail2kim.com', 'mail2kimberly.com', 'mail2king.com', 'mail2kirk.com', 'mail2kiss.com', 'mail2kosher.com', 'mail2kristin.com', 'mail2kurt.com', 'mail2kuwait.com', 'mail2kyle.com', 'mail2kyrgyzstan.com', 'mail2la.com', 'mail2lacrosse.com', 'mail2lance.com', 'mail2lao.com', 'mail2larry.com', 'mail2latvia.com', 'mail2laugh.com', 'mail2laura.com', 'mail2lauren.com', 'mail2laurie.com', 'mail2lawrence.com', 'mail2lawyer.com', 
'mail2lebanon.com', 'mail2lee.com', 'mail2leo.com', 'mail2leon.com', 'mail2leonard.com', 'mail2leone.com', 'mail2leslie.com', 'mail2letter.com', 'mail2liberia.com', 'mail2libertarian.com', 'mail2libra.com', 'mail2libya.com', 'mail2liechtenstein.com', 'mail2life.com', 'mail2linda.com', 'mail2linux.com', 'mail2lionel.com', 'mail2lipstick.com', 'mail2liquid.com', 'mail2lisa.com', 'mail2lithuania.com', 'mail2litigator.com', 'mail2liz.com', 'mail2lloyd.com', 'mail2lois.com', 'mail2lola.com', 'mail2london.com', 'mail2looking.com', 'mail2lori.com', 'mail2lost.com', 'mail2lou.com', 'mail2louis.com', 'mail2louisiana.com', 'mail2lovable.com', 'mail2love.com', 'mail2lucky.com', 'mail2lucy.com', 'mail2lunch.com', 'mail2lust.com', 'mail2luxembourg.com', 'mail2luxury.com', 'mail2lyle.com', 'mail2lynn.com', 'mail2madagascar.com', 'mail2madison.com', 'mail2madrid.com', 'mail2maggie.com', 'mail2mail4.com', 'mail2maine.com', 'mail2malawi.com', 'mail2malaysia.com', 'mail2maldives.com', 'mail2mali.com', 'mail2malta.com', 'mail2mambo.com', 'mail2man.com', 'mail2mandy.com', 'mail2manhunter.com', 'mail2mankind.com', 'mail2many.com', 'mail2marc.com', 'mail2marcia.com', 'mail2margaret.com', 'mail2margie.com', 'mail2marhaba.com', 'mail2maria.com', 'mail2marilyn.com', 'mail2marines.com', 'mail2mark.com', 'mail2marriage.com', 'mail2married.com', 'mail2marries.com', 'mail2mars.com', 'mail2marsha.com', 'mail2marshallislands.com', 'mail2martha.com', 'mail2martin.com', 'mail2marty.com', 'mail2marvin.com', 'mail2mary.com', 'mail2maryland.com', 'mail2mason.com', 'mail2massachusetts.com', 'mail2matt.com', 'mail2matthew.com', 'mail2maurice.com', 'mail2mauritania.com', 'mail2mauritius.com', 'mail2max.com', 'mail2maxwell.com', 'mail2maybe.com', 'mail2mba.com', 'mail2me4u.com', 'mail2mechanic.com', 'mail2medieval.com', 'mail2megan.com', 'mail2mel.com', 'mail2melanie.com', 'mail2melissa.com', 'mail2melody.com', 'mail2member.com', 'mail2memphis.com', 'mail2methodist.com', 'mail2mexican.com', 
'mail2mexico.com', 'mail2mgz.com', 'mail2miami.com', 'mail2michael.com', 'mail2michelle.com', 'mail2michigan.com', 'mail2mike.com', 'mail2milan.com', 'mail2milano.com', 'mail2mildred.com', 'mail2milkyway.com', 'mail2millennium.com', 'mail2millionaire.com', 'mail2milton.com', 'mail2mime.com', 'mail2mindreader.com', 'mail2mini.com', 'mail2minister.com', 'mail2minneapolis.com', 'mail2minnesota.com', 'mail2miracle.com', 'mail2missionary.com', 'mail2mississippi.com', 'mail2missouri.com', 'mail2mitch.com', 'mail2model.com', 'mail2moldova.com', 'mail2molly.com', 'mail2mom.com', 'mail2monaco.com', 'mail2money.com', 'mail2mongolia.com', 'mail2monica.com', 'mail2montana.com', 'mail2monty.com', 'mail2moon.com', 'mail2morocco.com', 'mail2morpheus.com', 'mail2mors.com', 'mail2moscow.com', 'mail2moslem.com', 'mail2mouseketeer.com', 'mail2movies.com', 'mail2mozambique.com', 'mail2mp3.com', 'mail2mrright.com', 'mail2msright.com', 'mail2museum.com', 'mail2music.com', 'mail2musician.com', 'mail2muslim.com', 'mail2my.com', 'mail2myboat.com', 'mail2mycar.com', 'mail2mycell.com', 'mail2mygsm.com', 'mail2mylaptop.com', 'mail2mymac.com', 'mail2mypager.com', 'mail2mypalm.com', 'mail2mypc.com', 'mail2myphone.com', 'mail2myplane.com', 'mail2namibia.com', 'mail2nancy.com', 'mail2nasdaq.com', 'mail2nathan.com', 'mail2nauru.com', 'mail2navy.com', 'mail2neal.com', 'mail2nebraska.com', 'mail2ned.com', 'mail2neil.com', 'mail2nelson.com', 'mail2nemesis.com', 'mail2nepal.com', 'mail2netherlands.com', 'mail2network.com', 'mail2nevada.com', 'mail2newhampshire.com', 'mail2newjersey.com', 'mail2newmexico.com', 'mail2newyork.com', 'mail2newzealand.com', 'mail2nicaragua.com', 'mail2nick.com', 'mail2nicole.com', 'mail2niger.com', 'mail2nigeria.com', 'mail2nike.com', 'mail2no.com', 'mail2noah.com', 'mail2noel.com', 'mail2noelle.com', 'mail2normal.com', 'mail2norman.com', 'mail2northamerica.com', 'mail2northcarolina.com', 'mail2northdakota.com', 'mail2northpole.com', 'mail2norway.com', 'mail2notus.com', 
'mail2noway.com', 'mail2nowhere.com', 'mail2nuclear.com', 'mail2nun.com', 'mail2ny.com', 'mail2oasis.com', 'mail2oceanographer.com', 'mail2ohio.com', 'mail2ok.com', 'mail2oklahoma.com', 'mail2oliver.com', 'mail2oman.com', 'mail2one.com', 'mail2onfire.com', 'mail2online.com', 'mail2oops.com', 'mail2open.com', 'mail2ophthalmologist.com', 'mail2optometrist.com', 'mail2oregon.com', 'mail2oscars.com', 'mail2oslo.com', 'mail2painter.com', 'mail2pakistan.com', 'mail2palau.com', 'mail2pan.com', 'mail2panama.com', 'mail2paraguay.com', 'mail2paralegal.com', 'mail2paris.com', 'mail2park.com', 'mail2parker.com', 'mail2party.com', 'mail2passion.com', 'mail2pat.com', 'mail2patricia.com', 'mail2patrick.com', 'mail2patty.com', 'mail2paul.com', 'mail2paula.com', 'mail2pay.com', 'mail2peace.com', 'mail2pediatrician.com', 'mail2peggy.com', 'mail2pennsylvania.com', 'mail2perry.com', 'mail2persephone.com', 'mail2persian.com', 'mail2peru.com', 'mail2pete.com', 'mail2peter.com', 'mail2pharmacist.com', 'mail2phil.com', 'mail2philippines.com', 'mail2phoenix.com', 'mail2phonecall.com', 'mail2phyllis.com', 'mail2pickup.com', 'mail2pilot.com', 'mail2pisces.com', 'mail2planet.com', 'mail2platinum.com', 'mail2plato.com', 'mail2pluto.com', 'mail2pm.com', 'mail2podiatrist.com', 'mail2poet.com', 'mail2poland.com', 'mail2policeman.com', 'mail2policewoman.com', 'mail2politician.com', 'mail2pop.com', 'mail2pope.com', 'mail2popular.com', 'mail2portugal.com', 'mail2poseidon.com', 'mail2potatohead.com', 'mail2power.com', 'mail2presbyterian.com', 'mail2president.com', 'mail2priest.com', 'mail2prince.com', 'mail2princess.com', 'mail2producer.com', 'mail2professor.com', 'mail2protect.com', 'mail2psychiatrist.com', 'mail2psycho.com', 'mail2psychologist.com', 'mail2qatar.com', 'mail2queen.com', 'mail2rabbi.com', 'mail2race.com', 'mail2racer.com', 'mail2rachel.com', 'mail2rage.com', 'mail2rainmaker.com', 'mail2ralph.com', 'mail2randy.com', 'mail2rap.com', 'mail2rare.com', 'mail2rave.com', 'mail2ray.com', 
'mail2raymond.com', 'mail2realtor.com', 'mail2rebecca.com', 'mail2recruiter.com', 'mail2recycle.com', 'mail2redhead.com', 'mail2reed.com', 'mail2reggie.com', 'mail2register.com', 'mail2rent.com', 'mail2republican.com', 'mail2resort.com', 'mail2rex.com', 'mail2rhodeisland.com', 'mail2rich.com', 'mail2richard.com', 'mail2ricky.com', 'mail2ride.com', 'mail2riley.com', 'mail2rita.com', 'mail2rob.com', 'mail2robert.com', 'mail2roberta.com', 'mail2robin.com', 'mail2rock.com', 'mail2rocker.com', 'mail2rod.com', 'mail2rodney.com', 'mail2romania.com', 'mail2rome.com', 'mail2ron.com', 'mail2ronald.com', 'mail2ronnie.com', 'mail2rose.com', 'mail2rosie.com', 'mail2roy.com', 'mail2rss.org', 'mail2rudy.com', 'mail2rugby.com', 'mail2runner.com', 'mail2russell.com', 'mail2russia.com', 'mail2russian.com', 'mail2rusty.com', 'mail2ruth.com', 'mail2rwanda.com', 'mail2ryan.com', 'mail2sa.com', 'mail2sabrina.com', 'mail2safe.com', 'mail2sagittarius.com', 'mail2sail.com', 'mail2sailor.com', 'mail2sal.com', 'mail2salaam.com', 'mail2sam.com', 'mail2samantha.com', 'mail2samoa.com', 'mail2samurai.com', 'mail2sandra.com', 'mail2sandy.com', 'mail2sanfrancisco.com', 'mail2sanmarino.com', 'mail2santa.com', 'mail2sara.com', 'mail2sarah.com', 'mail2sat.com', 'mail2saturn.com', 'mail2saudi.com', 'mail2saudiarabia.com', 'mail2save.com', 'mail2savings.com', 'mail2school.com', 'mail2scientist.com', 'mail2scorpio.com', 'mail2scott.com', 'mail2sean.com', 'mail2search.com', 'mail2seattle.com', 'mail2secretagent.com', 'mail2senate.com', 'mail2senegal.com', 'mail2sensual.com', 'mail2seth.com', 'mail2sevenseas.com', 'mail2sexy.com', 'mail2seychelles.com', 'mail2shane.com', 'mail2sharon.com', 'mail2shawn.com', 'mail2ship.com', 'mail2shirley.com', 'mail2shoot.com', 'mail2shuttle.com', 'mail2sierraleone.com', 'mail2simon.com', 'mail2singapore.com', 'mail2single.com', 'mail2site.com', 'mail2skater.com', 'mail2skier.com', 'mail2sky.com', 'mail2sleek.com', 'mail2slim.com', 'mail2slovakia.com', 
'mail2slovenia.com', 'mail2smile.com', 'mail2smith.com', 'mail2smooth.com', 'mail2soccer.com', 'mail2soccerfan.com', 'mail2socialist.com', 'mail2soldier.com', 'mail2somalia.com', 'mail2son.com', 'mail2song.com', 'mail2sos.com', 'mail2sound.com', 'mail2southafrica.com', 'mail2southamerica.com', 'mail2southcarolina.com', 'mail2southdakota.com', 'mail2southkorea.com', 'mail2southpole.com', 'mail2spain.com', 'mail2spanish.com', 'mail2spare.com', 'mail2spectrum.com', 'mail2splash.com', 'mail2sponsor.com', 'mail2sports.com', 'mail2srilanka.com', 'mail2stacy.com', 'mail2stan.com', 'mail2stanley.com', 'mail2star.com', 'mail2state.com', 'mail2stephanie.com', 'mail2steve.com', 'mail2steven.com', 'mail2stewart.com', 'mail2stlouis.com', 'mail2stock.com', 'mail2stockholm.com', 'mail2stockmarket.com', 'mail2storage.com', 'mail2store.com', 'mail2strong.com', 'mail2student.com', 'mail2studio.com', 'mail2studio54.com', 'mail2stuntman.com', 'mail2subscribe.com', 'mail2sudan.com', 'mail2superstar.com', 'mail2surfer.com', 'mail2suriname.com', 'mail2susan.com', 'mail2suzie.com', 'mail2swaziland.com', 'mail2sweden.com', 'mail2sweetheart.com', 'mail2swim.com', 'mail2swimmer.com', 'mail2swiss.com', 'mail2switzerland.com', 'mail2sydney.com', 'mail2sylvia.com', 'mail2syria.com', 'mail2taboo.com', 'mail2taiwan.com', 'mail2tajikistan.com', 'mail2tammy.com', 'mail2tango.com', 'mail2tanya.com', 'mail2tanzania.com', 'mail2tara.com', 'mail2taurus.com', 'mail2taxi.com', 'mail2taxidermist.com', 'mail2taylor.com', 'mail2taz.com', 'mail2teacher.com', 'mail2technician.com', 'mail2ted.com', 'mail2telephone.com', 'mail2teletubbie.com', 'mail2tenderness.com', 'mail2tennessee.com', 'mail2tennis.com', 'mail2tennisfan.com', 'mail2terri.com', 'mail2terry.com', 'mail2test.com', 'mail2texas.com', 'mail2thailand.com', 'mail2therapy.com', 'mail2think.com', 'mail2tickets.com', 'mail2tiffany.com', 'mail2tim.com', 'mail2time.com', 'mail2timothy.com', 'mail2tina.com', 'mail2titanic.com', 'mail2toby.com', 
'mail2todd.com', 'mail2togo.com', 'mail2tom.com', 'mail2tommy.com', 'mail2tonga.com', 'mail2tony.com', 'mail2touch.com', 'mail2tourist.com', 'mail2tracey.com', 'mail2tracy.com', 'mail2tramp.com', 'mail2travel.com', 'mail2traveler.com', 'mail2travis.com', 'mail2trekkie.com', 'mail2trex.com', 'mail2triallawyer.com', 'mail2trick.com', 'mail2trillionaire.com', 'mail2troy.com', 'mail2truck.com', 'mail2trump.com', 'mail2try.com', 'mail2tunisia.com', 'mail2turbo.com', 'mail2turkey.com', 'mail2turkmenistan.com', 'mail2tv.com', 'mail2tycoon.com', 'mail2tyler.com', 'mail2u4me.com', 'mail2uae.com', 'mail2uganda.com', 'mail2uk.com', 'mail2ukraine.com', 'mail2uncle.com', 'mail2unsubscribe.com', 'mail2uptown.com', 'mail2uruguay.com', 'mail2usa.com', 'mail2utah.com', 'mail2uzbekistan.com', 'mail2v.com', 'mail2vacation.com', 'mail2valentines.com', 'mail2valerie.com', 'mail2valley.com', 'mail2vamoose.com', 'mail2vanessa.com', 'mail2vanuatu.com', 'mail2venezuela.com', 'mail2venous.com', 'mail2venus.com', 'mail2vermont.com', 'mail2vickie.com', 'mail2victor.com', 'mail2victoria.com', 'mail2vienna.com', 'mail2vietnam.com', 'mail2vince.com', 'mail2virginia.com', 'mail2virgo.com', 'mail2visionary.com', 'mail2vodka.com', 'mail2volleyball.com', 'mail2waiter.com', 'mail2wallstreet.com', 'mail2wally.com', 'mail2walter.com', 'mail2warren.com', 'mail2washington.com', 'mail2wave.com', 'mail2way.com', 'mail2waycool.com', 'mail2wayne.com', 'mail2webmaster.com', 'mail2webtop.com', 'mail2webtv.com', 'mail2weird.com', 'mail2wendell.com', 'mail2wendy.com', 'mail2westend.com', 'mail2westvirginia.com', 'mail2whether.com', 'mail2whip.com', 'mail2white.com', 'mail2whitehouse.com', 'mail2whitney.com', 'mail2why.com', 'mail2wilbur.com', 'mail2wild.com', 'mail2willard.com', 'mail2willie.com', 'mail2wine.com', 'mail2winner.com', 'mail2wired.com', 'mail2wisconsin.com', 'mail2woman.com', 'mail2wonder.com', 'mail2world.com', 'mail2worship.com', 'mail2wow.com', 'mail2www.com', 'mail2wyoming.com', 
'mail2xfiles.com', 'mail2xox.com', 'mail2yachtclub.com', 'mail2yahalla.com', 'mail2yemen.com', 'mail2yes.com', 'mail2yugoslavia.com', 'mail2zack.com', 'mail2zambia.com', 'mail2zenith.com', 'mail2zephir.com', 'mail2zeus.com', 'mail2zipper.com', 'mail2zoo.com', 'mail2zoologist.com', 'mail2zurich.com', 'mail3000.com', 'mail333.com', 'mail4trash.com', 'mail4u.info', 'mail8.com', 'mailandftp.com', 'mailandnews.com', 'mailas.com', 'mailasia.com', 'mailbidon.com', 'mailbiz.biz', 'mailblocks.com', 'mailbolt.com', 'mailbomb.net', 'mailboom.com', 'mailbox.as', 'mailbox.co.za', 'mailbox.gr', 'mailbox.hu', 'mailbox72.biz', 'mailbox80.biz', 'mailbr.com.br', 'mailbucket.org', 'mailc.net', 'mailcan.com', 'mailcat.biz', 'mailcatch.com', 'mailcc.com', 'mailchoose.co', 'mailcity.com', 'mailclub.fr', 'mailclub.net', 'mailde.de', 'mailde.info', 'maildrop.cc', 'maildrop.gq', 'maildx.com', 'mailed.ro', 'maileimer.de', 'mailexcite.com', 'mailexpire.com', 'mailfa.tk', 'mailfly.com', 'mailforce.net', 'mailforspam.com', 'mailfree.gq', 'mailfreeonline.com', 'mailfreeway.com', 'mailfs.com', 'mailftp.com', 'mailgate.gr', 'mailgate.ru', 'mailgenie.net', 'mailguard.me', 'mailhaven.com', 'mailhood.com', 'mailimate.com', 'mailin8r.com', 'mailinatar.com', 'mailinater.com', 'mailinator.com', 'mailinator.net', 'mailinator.org', 'mailinator.us', 'mailinator2.com', 'mailinblack.com', 'mailincubator.com', 'mailingaddress.org', 'mailingweb.com', 'mailisent.com', 'mailismagic.com', 'mailite.com', 'mailmate.com', 'mailme.dk', 'mailme.gq', 'mailme.ir', 'mailme.lv', 'mailme24.com', 'mailmetrash.com', 'mailmight.com', 'mailmij.nl', 'mailmoat.com', 'mailms.com', 'mailnator.com', 'mailnesia.com', 'mailnew.com', 'mailnull.com', 'mailops.com', 'mailorg.org', 'mailoye.com', 'mailpanda.com', 'mailpick.biz', 'mailpokemon.com', 'mailpost.zzn.com', 'mailpride.com', 'mailproxsy.com', 'mailpuppy.com', 'mailquack.com', 'mailrock.biz', 'mailroom.com', 'mailru.com', 'mailsac.com', 'mailscrap.com', 'mailseal.de', 
'mailsent.net', 'mailserver.ru', 'mailservice.ms', 'mailshell.com', 'mailshuttle.com', 'mailsiphon.com', 'mailslapping.com', 'mailsnare.net', 'mailstart.com', 'mailstartplus.com', 'mailsurf.com', 'mailtag.com', 'mailtemp.info', 'mailto.de', 'mailtome.de', 'mailtothis.com', 'mailtrash.net', 'mailtv.net', 'mailtv.tv', 'mailueberfall.de', 'mailup.net', 'mailwire.com', 'mailworks.org', 'mailzi.ru', 'mailzilla.com', 'mailzilla.org', 'makemetheking.com', 'maktoob.com', 'malayalamtelevision.net', 'malayalapathram.com', 'male.ru', 'maltesemail.com', 'mamber.net', 'manager.de', 'manager.in.th', 'mancity.net', 'manlymail.net', 'mantrafreenet.com', 'mantramail.com', 'mantraonline.com', 'manutdfans.com', 'manybrain.com', 'marchmail.com', 'marfino.net', 'margarita.ru', 'mariah-carey.ml.org', 'mariahc.com', 'marijuana.com', 'marijuana.nl', 'marketing.lu', 'marketingfanatic.com', 'marketweighton.com', 'married-not.com', 'marriedandlovingit.com', 'marry.ru', 'marsattack.com', 'martindalemail.com', 'martinguerre.net', 'mash4077.com', 'masrawy.com', 'matmail.com', 'mauimail.com', 'mauritius.com', 'maximumedge.com', 'maxleft.com', 'maxmail.co.uk', 'mayaple.ru', 'mbox.com.au', 'mbx.cc', 'mchsi.com', 'mciworldcom.net', 'mcrmail.com', 'me-mail.hu', 'me.com', 'meanpeoplesuck.com', 'meatismurder.net', 'medical.net.au', 'medione.net', 'medmail.com', 'medscape.com', 'meetingmall.com', 'mega.zik.dj', 'megago.com', 'megamail.pt', 'megapathdsl.net', 'megapoint.com', 'mehrani.com', 'mehtaweb.com', 'meine-dateien.info', 'meine-diashow.de', 'meine-fotos.info', 'meine-urlaubsfotos.de', 'meinspamschutz.de', 'mekhong.com', 'melodymail.com', 'meloo.com', 'meltmail.com', 'members.student.com', 'menja.net', 'merda.flu.cc', 'merda.igg.biz', 'merda.nut.cc', 'merda.usa.cc', 'merseymail.com', 'mesra.net', 'message.hu', 'message.myspace.com', 'messagebeamer.de', 'messages.to', 'messagez.com', 'metacrawler.com', 'metalfan.com', 'metaping.com', 'metta.lk', 'mexicomail.com', 'mezimages.net', 'mfsa.ru', 
'miatadriver.com', 'mierdamail.com', 'miesto.sk', 'mighty.co.za', 'migmail.net', 'migmail.pl', 'migumail.com', 'miho-nakayama.com', 'mikrotamanet.com', 'millionaireintraining.com', 'millionairemail.com', 'milmail.com', 'milmail.com15', 'mindless.com', 'mindspring.com', 'minermail.com', 'mini-mail.com', 'minister.com', 'ministry-of-silly-walks.de', 'mintemail.com', 'misery.net', 'misterpinball.de', 'mit.tc', 'mittalweb.com', 'mixmail.com', 'mjfrogmail.com', 'ml1.net', 'mlanime.com', 'mlb.bounce.ed10.net', 'mm.st', 'mmail.com', 'mns.ru', 'mo3gov.net', 'moakt.com', 'mobico.ru', 'mobilbatam.com', 'mobileninja.co.uk', 'mochamail.com', 'modemnet.net', 'modernenglish.com', 'modomail.com', 'mohammed.com', 'mohmal.com', 'moldova.cc', 'moldova.com', 'moldovacc.com', 'mom-mail.com', 'momslife.com', 'moncourrier.fr.nf', 'monemail.com', 'monemail.fr.nf', 'money.net', 'mongol.net', 'monmail.fr.nf', 'monsieurcinema.com', 'montevideo.com.uy', 'monumentmail.com', 'moomia.com', 'moonman.com', 'moose-mail.com', 'mor19.uu.gl', 'mortaza.com', 'mosaicfx.com', 'moscowmail.com', 'mosk.ru', 'most-wanted.com', 'mostlysunny.com', 'motorcyclefan.net', 'motormania.com', 'movemail.com', 'movieemail.net', 'movieluver.com', 'mox.pp.ua', 'mozartmail.com', 'mozhno.net', 'mp3haze.com', 'mp4.it', 'mr-potatohead.com', 'mrpost.com', 'mrspender.com', 'mscold.com', 'msgbox.com', 'msn.cn', 'msn.com', 'msn.nl', 'msx.ru', 'mt2009.com', 'mt2014.com', 'mt2015.com', 'mt2016.com', 'mttestdriver.com', 'muehlacker.tk', 'muell.icu', 'muellemail.com', 'muellmail.com', 'multiplechoices', 'mundomail.net', 'munich.com', 'music.com', 'music.com19', 'music.maigate.ru', 'musician.com', 'musician.org', 'musicscene.org', 'muskelshirt.de', 'muslim.com', 'muslimemail.com', 'muslimsonline.com', 'mutantweb.com', 'mvrht.com', 'my.com', 'my10minutemail.com', 'mybox.it', 'mycabin.com', 'mycampus.com', 'mycard.net.ua', 'mycity.com', 'mycleaninbox.net', 'mycool.com', 'mydomain.com', 'mydotcomaddress.com', 'myfairpoint.net', 
'myfamily.com', 'myfastmail.com', 'myfunnymail.com', 'mygo.com', 'myiris.com', 'myjazzmail.com', 'mymac.ru', 'mymacmail.com', 'mymail-in.net', 'mymail.ro', 'mynamedot.com', 'mynet.com', 'mynetaddress.com', 'mynetstore.de', 'myotw.net', 'myownemail.com', 'myownfriends.com', 'mypacks.net', 'mypad.com', 'mypartyclip.de', 'mypersonalemail.com', 'myphantomemail.com', 'myplace.com', 'myrambler.ru', 'myrealbox.com', 'myremarq.com', 'mysamp.de', 'myself.com', 'myspaceinc.net', 'myspamless.com', 'mystupidjob.com', 'mytemp.email', 'mytempemail.com', 'mytempmail.com', 'mythirdage.com', 'mytrashmail.com', 'myway.com', 'myworldmail.com', 'n2.com', 'n2baseball.com', 'n2business.com', 'n2mail.com', 'n2soccer.com', 'n2software.com', 'nabc.biz', 'nabuma.com', 'nafe.com', 'nagarealm.com', 'nagpal.net', 'nakedgreens.com', 'name.com', 'nameplanet.com', 'nanaseaikawa.com', 'nandomail.com', 'naplesnews.net', 'naseej.com', 'nate.com', 'nativestar.net', 'nativeweb.net', 'naui.net', 'nauticom.net', 'naver.com', 'navigator.lv', 'navy.org', 'naz.com', 'nc.rr.com', 'nc.ru', 'nchoicemail.com', 'neeva.net', 'nekto.com', 'nekto.net', 'nekto.ru', 'nemra1.com', 'nenter.com', 'neo.rr.com', 'neomailbox.com', 'nepwk.com', 'nervhq.org', 'nervmich.net', 'nervtmich.net', 'net-c.be', 'net-c.ca', 'net-c.cat', 'net-c.com', 'net-c.es', 'net-c.fr', 'net-c.it', 'net-c.lu', 'net-c.nl', 'net-c.pl', 'net-pager.net', 'net-shopping.com', 'net.tf', 'net4b.pt', 'net4you.at', 'netaddres.ru', 'netaddress.ru', 'netbounce.com', 'netbroadcaster.com', 'netby.dk', 'netc.eu', 'netc.fr', 'netc.it', 'netc.lu', 'netc.pl', 'netcenter-vn.net', 'netcity.ru', 'netcmail.com', 'netcom.ca', 'netcom.com', 'netcourrier.com', 'netexecutive.com', 'netexpressway.com', 'netfirms.com', 'netgenie.com', 'netian.com', 'netizen.com.ar', 'netkushi.com', 'netlane.com', 'netlimit.com', 'netmail.kg', 'netmails.com', 'netmails.net', 'netman.ru', 'netmanor.com', 'netmongol.com', 'netnet.com.sg', 'netnoir.net', 'netpiper.com', 'netposta.net', 
'netradiomail.com', 'netralink.com', 'netscape.com', 'netscape.net', 'netscapeonline.co.uk', 'netsero.net', 'netspace.net.au', 'netspeedway.com', 'netsquare.com', 'netster.com', 'nettaxi.com', 'nettemail.com', 'netterchef.de', 'netti.fi', 'netvigator.com', 'netzero.com', 'netzero.net', 'netzidiot.de', 'netzoola.com', 'neue-dateien.de', 'neuf.fr', 'neuro.md', 'neustreet.com', 'neverbox.com', 'newap.ru', 'newarbat.net', 'newmail.com', 'newmail.net', 'newmail.ru', 'newsboysmail.com', 'newyork.com', 'newyorkcity.com', 'nextmail.ru', 'nexxmail.com', 'nfmail.com', 'ngs.ru', 'nhmail.com', 'nice-4u.com', 'nicebush.com', 'nicegal.com', 'nicholastse.net', 'nicolastse.com', 'niepodam.pl', 'nightimeuk.com', 'nightmail.com', 'nightmail.ru', 'nikopage.com', 'nikulino.net', 'nimail.com', 'nincsmail.hu', 'ninfan.com', 'nirvanafan.com', 'nm.ru', 'nmail.cf', 'nnh.com', 'nnov.ru', 'no-spam.ws', 'no4ma.ru', 'noavar.com', 'noblepioneer.com', 'nogmailspam.info', 'nomail.pw', 'nomail.xl.cx', 'nomail2me.com', 'nomorespamemails.com', 'nonpartisan.com', 'nonspam.eu', 'nonspammer.de', 'nonstopcinema.com', 'norika-fujiwara.com', 'norikomail.com', 'northgates.net', 'nospam.ze.tc', 'nospam4.us', 'nospamfor.us', 'nospammail.net', 'nospamthanks.info', 'notmailinator.com', 'notsharingmy.info', 'notyouagain.com', 'novogireevo.net', 'novokosino.net', 'nowhere.org', 'nowmymail.com', 'ntelos.net', 'ntlhelp.net', 'ntlworld.com', 'ntscan.com', 'null.net', 'nullbox.info', 'numep.ru', 'nur-fuer-spam.de', 'nurfuerspam.de', 'nus.edu.sg', 'nuvse.com', 'nwldx.com', 'nwytg.net', 'nxt.ru', 'ny.com', 'nybce.com', 'nybella.com', 'nyc.com', 'nycmail.com', 'nz11.com', 'nzoomail.com', 'o-tay.com', 'o2.co.uk', 'o2.pl', 'oaklandas-fan.com', 'oath.com', 'objectmail.com', 'obobbo.com', 'oceanfree.net', 'ochakovo.net', 'odaymail.com', 'oddpost.com', 'odmail.com', 'odnorazovoe.ru', 'office-dateien.de', 'office-email.com', 'officedomain.com', 'offroadwarrior.com', 'oi.com.br', 'oicexchange.com', 'oida.icu', 'oikrach.com', 
'ok.kz', 'ok.net', 'ok.ru', 'okbank.com', 'okhuman.com', 'okmad.com', 'okmagic.com', 'okname.net', 'okuk.com', 'oldbuthealthy.com', 'oldies1041.com', 'oldies104mail.com', 'ole.com', 'olemail.com', 'olg.com', 'oligarh.ru', 'olympist.net', 'olypmall.ru', 'omaninfo.com', 'omen.ru', 'ondikoi.com', 'onebox.com', 'onenet.com.ar', 'oneoffemail.com', 'oneoffmail.com', 'onet.com.pl', 'onet.eu', 'onet.pl', 'onewaymail.com', 'oninet.pt', 'onlatedotcom.info', 'online.de', 'online.ie', 'online.ms', 'online.nl', 'online.ru', 'onlinecasinogamblings.com', 'onlinewiz.com', 'onmicrosoft.com', 'onmilwaukee.com', 'onobox.com', 'onvillage.com', 'oopi.org', 'op.pl', 'opayq.com', 'opendiary.com', 'openmailbox.org', 'operafan.com', 'operamail.com', 'opoczta.pl', 'optician.com', 'optonline.net', 'optusnet.com.au', 'orange.fr', 'orange.net', 'orbitel.bg', 'ordinaryamerican.net', 'orgmail.net', 'orthodontist.net', 'osite.com.br', 'oso.com', 'otakumail.com', 'otherinbox.com', 'our-computer.com', 'our-office.com', 'our.st', 'ourbrisbane.com', 'ourklips.com', 'ournet.md', 'outel.com', 'outgun.com', 'outlawspam.com', 'outlook.at', 'outlook.be', 'outlook.cl', 'outlook.co.id', 'outlook.co.il', 'outlook.co.nz', 'outlook.co.th', 'outlook.com', 'outlook.com.au', 'outlook.com.br', 'outlook.com.gr', 'outlook.com.pe', 'outlook.com.tr', 'outlook.com.vn', 'outlook.cz', 'outlook.de', 'outlook.dk', 'outlook.es', 'outlook.fr', 'outlook.hu', 'outlook.ie', 'outlook.in', 'outlook.it', 'outlook.jp', 'outlook.kr', 'outlook.lv', 'outlook.my', 'outlook.nl', 'outlook.ph', 'outlook.pt', 'outlook.sa', 'outlook.sg', 'outlook.sk', 'outloook.com', 'over-the-rainbow.com', 'ovi.com', 'ovpn.to', 'owlpic.com', 'ownmail.net', 'ozbytes.net.au', 'ozemail.com.au', 'ozz.ru', 'pacbell.net', 'pacific-ocean.com', 'pacific-re.com', 'pacificwest.com', 'packersfan.com', 'pagina.de', 'pagons.org', 'paidforsurf.com', 'pakistanmail.com', 'pakistanoye.com', 'palestinemail.com', 'pancakemail.com', 'pandawa.com', 'pandora.be', 
'papierkorb.me', 'paradiseemail.com', 'paris.com', 'parkjiyoon.com', 'parrot.com', 'parsmail.com', 'partlycloudy.com', 'partybombe.de', 'partyheld.de', 'partynight.at', 'parvazi.com', 'passwordmail.com', 'pathfindermail.com', 'patmail.com', 'patmedia.net', 'patra.net', 'pconnections.net', 'pcpostal.com', 'pcsrock.com', 'pcusers.otherinbox.com', 'peachworld.com', 'pechkin.ru', 'pediatrician.com', 'pekklemail.com', 'pemail.net', 'penpen.com', 'peoplepc.com', 'peopleweb.com', 'pepbot.com', 'perfectmail.com', 'perovo.net', 'perso.be', 'personal.ro', 'personales.com', 'petlover.com', 'petml.com', 'petr.ru', 'pettypool.com', 'pezeshkpour.com', 'pfui.ru', 'phayze.com', 'phone.net', 'photo-impact.eu', 'photographer.net', 'phpbb.uu.gl', 'phreaker.net', 'phus8kajuspa.cu.cc', 'physicist.net', 'pianomail.com', 'pickupman.com', 'picusnet.com', 'piercedallover.com', 'pigeonportal.com', 'pigmail.net', 'pigpig.net', 'pilotemail.com', 'pimagop.com', 'pinoymail.com', 'pipeline.com', 'piracha.net', 'pisem.net', 'pjjkp.com', 'planet-mail.com', 'planet.nl', 'planetaccess.com', 'planetall.com', 'planetarymotion.net', 'planetdirect.com', 'planetearthinter.net', 'planetmail.com', 'planetmail.net', 'planetout.com', 'plasa.com', 'playersodds.com', 'playful.com', 'playstation.sony.com', 'plexolan.de', 'pluno.com', 'plus.com', 'plus.google.com', 'plusmail.com.br', 'pmail.net', 'pobox.com', 'pobox.hu', 'pobox.ru', 'pobox.sk', 'pochta.by', 'pochta.ru', 'pochta.ws', 'pochtamt.ru', 'poczta.fm', 'poczta.onet.pl', 'poetic.com', 'pokemail.net', 'pokemonpost.com', 'pokepost.com', 'polandmail.com', 'polbox.com', 'policeoffice.com', 'politician.com', 'politikerclub.de', 'polizisten-duzer.de', 'polyfaust.com', 'poofy.org', 'poohfan.com', 'pookmail.com', 'pool-sharks.com', 'poond.com', 'pop3.ru', 'popaccount.com', 'popmail.com', 'popsmail.com', 'popstar.com', 'populus.net', 'portableoffice.com', 'portugalmail.com', 'portugalmail.pt', 'portugalnet.com', 'positive-thinking.com', 'post.com', 'post.cz', 
'post.sk', 'posta.net', 'posta.ro', 'posta.rosativa.ro.org', 'postaccesslite.com', 'postafiok.hu', 'postafree.com', 'postaweb.com', 'poste.it', 'posteo.de', 'postfach.cc', 'postinbox.com', 'postino.ch', 'postino.it', 'postmark.net', 'postmaster.co.uk', 'postmaster.twitter.com', 'postpro.net', 'pousa.com', 'powerdivas.com', 'powerfan.com', 'pp.inet.fi', 'praize.com', 'pray247.com', 'predprinimatel.ru', 'premium-mail.fr', 'premiumproducts.com', 'premiumservice.com', 'prepodavatel.ru', 'presidency.com', 'presnya.net', 'press.co.jp', 'prettierthanher.com', 'priest.com', 'primposta.com', 'primposta.hu', 'printesamargareta.ro', 'privacy.net', 'privatdemail.net', 'privy-mail.com', 'privymail.de', 'pro.hu', 'probemail.com', 'prodigy.net', 'prodigy.net.mx', 'professor.ru', 'progetplus.it', 'programist.ru', 'programmer.net', 'programozo.hu', 'proinbox.com', 'project2k.com', 'prokuratura.ru', 'prolaunch.com', 'promessage.com', 'prontomail.com', 'prontomail.compopulus.net', 'protestant.com', 'protonmail.com', 'proxymail.eu', 'prtnx.com', 'prydirect.info', 'psi.net', 'psv-supporter.com', 'ptd.net', 'public-files.de', 'public.usa.com', 'publicist.com', 'pulp-fiction.com', 'punkass.com', 'puppy.com.my', 'purinmail.com', 'purpleturtle.com', 'put2.net', 'putthisinyourspamdatabase.com', 'pwrby.com', 'q.com', 'qatar.io', 'qatarmail.com', 'qdice.com', 'qip.ru', 'qis.net', 'qmail.com', 'qprfans.com', 'qq.com', 'qrio.com', 'quackquack.com', 'quake.ru', 'quakemail.com', 'qualityservice.com', 'quantentunnel.de', 'qudsmail.com', 'quepasa.com', 'quickhosts.com', 'quickinbox.com', 'quickmail.nl', 'quickmail.ru', 'quicknet.nl', 'quickwebmail.com', 'quiklinks.com', 'quikmail.com', 'qv7.info', 'qwest.net', 'qwestoffice.net', 'r-o-o-t.com', 'r7.com', 'raakim.com', 'racedriver.com', 'racefanz.com', 'racingfan.com.au', 'racingmail.com', 'radicalz.com', 'radiku.ye.vc', 'radiologist.net', 'ragingbull.com', 'ralib.com', 'rambler.ru', 'ranmamail.com', 'rastogi.net', 'ratt-n-roll.com', 
'rattle-snake.com', 'raubtierbaendiger.de', 'ravearena.com', 'ravefan.com', 'ravemail.co.za', 'ravemail.com', 'razormail.com', 'rccgmail.org', 'rcn.com', 'rcpt.at', 'realemail.net', 'realestatemail.net', 'reality-concept.club', 'reallyfast.biz', 'reallyfast.info', 'reallymymail.com', 'realradiomail.com', 'realtyagent.com', 'realtyalerts.ca', 'reborn.com', 'recode.me', 'reconmail.com', 'recursor.net', 'recycledmail.com', 'recycler.com', 'recyclermail.com', 'rediff.com', 'rediffmail.com', 'rediffmailpro.com', 'rednecks.com', 'redseven.de', 'redsfans.com', 'redwhitearmy.com', 'regbypass.com', 'reggaefan.com', 'reggafan.com', 'regiononline.com', 'registerednurses.com', 'regspaces.tk', 'reincarnate.com', 'relia.com', 'reliable-mail.com', 'religious.com', 'remail.ga', 'renren.com', 'repairman.com', 'reply.hu', 'reply.ticketmaster.com', 'represantive.com', 'representative.com', 'rescueteam.com', 'resgedvgfed.tk', 'resource.calendar.google.com', 'resumemail.com', 'retailfan.com', 'rexian.com', 'rezai.com', 'rhyta.com', 'richmondhill.com', 'rickymail.com', 'rin.ru', 'ring.by', 'riopreto.com.br', 'rklips.com', 'rmqkr.net', 'rn.com', 'ro.ru', 'roadrunner.com', 'roanokemail.com', 'rock.com', 'rocketmail.com', 'rocketship.com', 'rockfan.com', 'rodrun.com', 'rogers.com', 'rojname.com', 'rol.ro', 'rome.com', 'romymichele.com', 'roosh.com', 'rootprompt.org', 'rotfl.com', 'roughnet.com', 'royal.net', 'rpharmacist.com', 'rr.com', 'rrohio.com', 'rsub.com', 'rt.nl', 'rtrtr.com', 'ru.ru', 'rubyridge.com', 'runbox.com', 'rushpost.com', 'ruttolibero.com', 'rvshop.com', 'rxdoc.biz', 's-mail.com', 's0ny.net', 'sabreshockey.com', 'sacbeemail.com', 'saeuferleber.de', 'safarimail.com', 'safe-mail.net', 'safersignup.de', 'safetymail.info', 'safetypost.de', 'safrica.com', 'sagra.lu', 'sagra.lu.lu', 'sagra.lumarketing.lu', 'sags-per-mail.de', 'sailormoon.com', 'saint-mike.org', 'saintly.com', 'saintmail.net', 'sale-sale-sale.com', 'salehi.net', 'salesperson.net', 'samerica.com', 'samilan.net', 
'samiznaetekogo.net', 'sammimail.com', 'sanchezsharks.com', 'sandelf.de', 'sanfranmail.com', 'sanook.com', 'sanriotown.com', 'santanmail.com', 'sapo.pt', 'sativa.ro.org', 'saturnfans.com', 'saturnperformance.com', 'saudia.com', 'savecougars.com', 'savelife.ml', 'saveowls.com', 'sayhi.net', 'saynotospams.com', 'sbcglbal.net', 'sbcglobal.com', 'sbcglobal.net', 'scandalmail.com', 'scanova.in', 'scanova.io', 'scarlet.nl', 'scfn.net', 'schafmail.de', 'schizo.com', 'schmusemail.de', 'schoolemail.com', 'schoolmail.com', 'schoolsucks.com', 'schreib-doch-mal-wieder.de', 'schrott-email.de', 'schweiz.org', 'sci.fi', 'science.com.au', 'scientist.com', 'scifianime.com', 'scotland.com', 'scotlandmail.com', 'scottishmail.co.uk', 'scottishtories.com', 'scottsboro.org', 'scrapbookscrapbook.com', 'scubadiving.com', 'seanet.com', 'search.ua', 'search417.com', 'searchwales.com', 'sebil.com', 'seckinmail.com', 'secret-police.com', 'secretarias.com', 'secretary.net', 'secretemail.de', 'secretservices.net', 'secure-mail.biz', 'secure-mail.cc', 'seductive.com', 'seekstoyboy.com', 'seguros.com.br', 'sekomaonline.com', 'selfdestructingmail.com', 'sellingspree.com', 'send.hu', 'sendmail.ru', 'sendme.cz', 'sendspamhere.com', 'senseless-entertainment.com', 'sent.as', 'sent.at', 'sent.com', 'sentrismail.com', 'serga.com.ar', 'servemymail.com', 'servermaps.net', 'services391.com', 'sesmail.com', 'sexmagnet.com', 'seznam.cz', 'sfr.fr', 'shahweb.net', 'shaniastuff.com', 'shared-files.de', 'sharedmailbox.org', 'sharewaredevelopers.com', 'sharklasers.com', 'sharmaweb.com', 'shaw.ca', 'she.com', 'shellov.net', 'shieldedmail.com', 'shieldemail.com', 'shiftmail.com', 'shinedyoureyes.com', 'shitaway.cf', 'shitaway.cu.cc', 'shitaway.ga', 'shitaway.gq', 'shitaway.ml', 'shitaway.tk', 'shitaway.usa.cc', 'shitmail.de', 'shitmail.me', 'shitmail.org', 'shitware.nl', 'shmeriously.com', 'shockinmytown.cu.cc', 'shootmail.com', 'shortmail.com', 'shortmail.net', 'shotgun.hu', 'showfans.com', 'showslow.de', 
'shqiptar.eu', 'shuf.com', 'sialkotcity.com', 'sialkotian.com', 'sialkotoye.com', 'sibmail.com', 'sify.com', 'sigaret.net', 'silkroad.net', 'simbamail.fm', 'sina.cn', 'sina.com', 'sinamail.com', 'singapore.com', 'singles4jesus.com', 'singmail.com', 'singnet.com.sg', 'singpost.com', 'sinnlos-mail.de', 'sirindia.com', 'siteposter.net', 'skafan.com', 'skeefmail.com', 'skim.com', 'skizo.hu', 'skrx.tk', 'skunkbox.com', 'sky.com', 'skynet.be', 'slamdunkfan.com', 'slapsfromlastnight.com', 'slaskpost.se', 'slave-auctions.net', 'slickriffs.co.uk', 'slingshot.com', 'slippery.email', 'slipry.net', 'slo.net', 'slotter.com', 'sm.westchestergov.com', 'smap.4nmv.ru', 'smapxsmap.net', 'smashmail.de', 'smellfear.com', 'smellrear.com', 'smileyface.comsmithemail.net', 'sminkymail.com', 'smoothmail.com', 'sms.at', 'smtp.ru', 'snail-mail.net', 'snail-mail.ney', 'snakebite.com', 'snakemail.com', 'sndt.net', 'sneakemail.com', 'sneakmail.de', 'snet.net', 'snip.net', 'sniper.hu', 'snkmail.com', 'snoopymail.com', 'snowboarding.com', 'snowdonia.net', 'so-simple.org', 'socamail.com', 'socceraccess.com', 'socceramerica.net', 'soccermail.com', 'soccermomz.com', 'social-mailer.tk', 'socialworker.net', 'sociologist.com', 'sofimail.com', 'sofort-mail.de', 'sofortmail.de', 'softhome.net', 'sogetthis.com', 'sogou.com', 'sohu.com', 'sokolniki.net', 'sol.dk', 'solar-impact.pro', 'solcon.nl', 'soldier.hu', 'solution4u.com', 'solvemail.info', 'songwriter.net', 'sonnenkinder.org', 'soodomail.com', 'soodonims.com', 'soon.com', 'soulfoodcookbook.com', 'soundofmusicfans.com', 'southparkmail.com', 'sovsem.net', 'sp.nl', 'space-bank.com', 'space-man.com', 'space-ship.com', 'space-travel.com', 'space.com', 'spaceart.com', 'spacebank.com', 'spacemart.com', 'spacetowns.com', 'spacewar.com', 'spainmail.com', 'spam.2012-2016.ru', 'spam.care', 'spam4.me', 'spamail.de', 'spamarrest.com', 'spamavert.com', 'spambob.com', 'spambob.net', 'spambob.org', 'spambog.com', 'spambog.de', 'spambog.net', 'spambog.ru', 
'spambooger.com', 'spambox.info', 'spambox.us', 'spamcannon.com', 'spamcannon.net', 'spamcero.com', 'spamcon.org', 'spamcorptastic.com', 'spamcowboy.com', 'spamcowboy.net', 'spamcowboy.org', 'spamday.com', 'spamdecoy.net', 'spameater.com', 'spameater.org', 'spamex.com', 'spamfree.eu', 'spamfree24.com', 'spamfree24.de', 'spamfree24.info', 'spamfree24.net', 'spamfree24.org', 'spamgoes.in', 'spamgourmet.com', 'spamgourmet.net', 'spamgourmet.org', 'spamherelots.com', 'spamhereplease.com', 'spamhole.com', 'spamify.com', 'spaminator.de', 'spamkill.info', 'spaml.com', 'spaml.de', 'spammotel.com', 'spamobox.com', 'spamoff.de', 'spamslicer.com', 'spamspot.com', 'spamstack.net', 'spamthis.co.uk', 'spamtroll.net', 'spankthedonkey.com', 'spartapiet.com', 'spazmail.com', 'speed.1s.fr', 'speedemail.net', 'speedpost.net', 'speedrules.com', 'speedrulz.com', 'speedy.com.ar', 'speedymail.org', 'sperke.net', 'spils.com', 'spinfinder.com', 'spiritseekers.com', 'spl.at', 'spoko.pl', 'spoofmail.de', 'sportemail.com', 'sportmail.ru', 'sportsmail.com', 'sporttruckdriver.com', 'spray.no', 'spray.se', 'sprintmail.com', 'sprynet.com', 'spybox.de', 'spymac.com', 'sraka.xyz', 'srilankan.net', 'ssl-mail.com', 'st-davids.net', 'stade.fr', 'stalag13.com', 'standalone.net', 'starbuzz.com', 'stargateradio.com', 'starmail.com', 'starmail.org', 'starmedia.com', 'starplace.com', 'starpower.net', 'starspath.com', 'start.com.au', 'starting-point.com', 'startkeys.com', 'startrekmail.com', 'starwars-fans.com', 'stealthmail.com', 'stillchronic.com', 'stinkefinger.net', 'stipte.nl', 'stockracer.com', 'stockstorm.com', 'stoned.com', 'stones.com', 'stop-my-spam.pp.ua', 'stopdropandroll.com', 'storksite.com', 'streber24.de', 'streetwisemail.com', 'stribmail.com', 'strompost.com', 'strongguy.com', 'student.su', 'studentcenter.org', 'stuffmail.de', 'subnetwork.com', 'subram.com', 'sudanmail.net', 'sudolife.me', 'sudolife.net', 'sudomail.biz', 'sudomail.com', 'sudomail.net', 'sudoverse.com', 'sudoverse.net', 
'sudoweb.net', 'sudoworld.com', 'sudoworld.net', 'sueddeutsche.de', 'suhabi.com', 'suisse.org', 'sukhumvit.net', 'sul.com.br', 'sunmail1.com', 'sunpoint.net', 'sunrise-sunset.com', 'sunsgame.com', 'sunumail.sn', 'suomi24.fi', 'super-auswahl.de', 'superdada.com', 'supereva.it', 'supergreatmail.com', 'supermail.ru', 'supermailer.jp', 'superman.ru', 'superposta.com', 'superrito.com', 'superstachel.de', 'surat.com', 'suremail.info', 'surf3.net', 'surfree.com', 'surfsupnet.net', 'surfy.net', 'surgical.net', 'surimail.com', 'survivormail.com', 'susi.ml', 'sviblovo.net', 'svk.jp', 'swbell.net', 'sweb.cz', 'swedenmail.com', 'sweetville.net', 'sweetxxx.de', 'swift-mail.com', 'swiftdesk.com', 'swingeasyhithard.com', 'swingfan.com', 'swipermail.zzn.com', 'swirve.com', 'swissinfo.org', 'swissmail.com', 'swissmail.net', 'switchboardmail.com', 'switzerland.org', 'swva.net', 'sx172.com', 'sympatico.ca', 'syom.com', 'syriamail.com', 't-online.de', 't.psh.me', 't2mail.com', 'table of free mail providers used in spam', 'tafmail.com', 'takoe.com', 'takoe.net', 'takuyakimura.com', 'talk21.com', 'talkcity.com', 'talkinator.com', 'talktalk.co.uk', 'tamb.ru', 'tamil.com', 'tampabay.rr.com', 'tangmonkey.com', 'tankpolice.com', 'taotaotano.com', 'tatanova.com', 'tattooedallover.com', 'tattoofanatic.com', 'tbwt.com', 'tcc.on.ca', 'tds.net', 'teacher.com', 'teachermail.net', 'teachers.org', 'teamdiscovery.com', 'teamtulsa.net', 'tech-center.com', 'tech4peace.org', 'techemail.com', 'techie.com', 'technisamail.co.za', 'technologist.com', 'technologyandstocks.com', 'techpointer.com', 'techscout.com', 'techseek.com', 'techsniper.com', 'techspot.com', 'teenagedirtbag.com', 'teewars.org', 'tele2.nl', 'telebot.com', 'telebot.net', 'telefonica.net', 'teleline.es', 'telenet.be', 'telepac.pt', 'telerymd.com', 'teleserve.dynip.com', 'teletu.it', 'teleworm.com', 'teleworm.us', 'telfort.nl', 'telfortglasvezel.nl', 'telinco.net', 'telkom.net', 'telpage.net', 'telstra.com', 'telstra.com.au', 'telus.net', 
'temp-mail.com', 'temp-mail.de', 'temp-mail.org', 'temp-mail.ru', 'temp.headstrong.de', 'tempail.com', 'tempe-mail.com', 'tempemail.biz', 'tempemail.co.za', 'tempemail.com', 'tempemail.net', 'tempinbox.co.uk', 'tempinbox.com', 'tempmail.eu', 'tempmail.it', 'tempmail.us', 'tempmail2.com', 'tempmaildemo.com', 'tempmailer.com', 'tempmailer.de', 'tempomail.fr', 'temporarioemail.com.br', 'temporaryemail.net', 'temporaryemail.us', 'temporaryforwarding.com', 'temporaryinbox.com', 'temporarymailaddress.com', 'tempthe.net', 'tempymail.com', 'temtulsa.net', 'tenchiclub.com', 'tenderkiss.com', 'tennismail.com', 'terminverpennt.de', 'terra.cl', 'terra.com', 'terra.com.ar', 'terra.com.br', 'terra.com.pe', 'terra.es', 'test.com', 'test.de', 'tfanus.com.er', 'tfbnw.net', 'tfz.net', 'tgasa.ru', 'tgma.ru', 'tgngu.ru', 'tgu.ru', 'thai.com', 'thaimail.com', 'thaimail.net', 'thanksnospam.info', 'thankyou2010.com', 'thc.st', 'the-african.com', 'the-airforce.com', 'the-aliens.com', 'the-american.com', 'the-animal.com', 'the-army.com', 'the-astronaut.com', 'the-beauty.com', 'the-big-apple.com', 'the-biker.com', 'the-boss.com', 'the-brazilian.com', 'the-canadian.com', 'the-canuck.com', 'the-captain.com', 'the-chinese.com', 'the-country.com', 'the-cowboy.com', 'the-davis-home.com', 'the-dutchman.com', 'the-eagles.com', 'the-englishman.com', 'the-fastest.net', 'the-fool.com', 'the-frenchman.com', 'the-galaxy.net', 'the-genius.com', 'the-gentleman.com', 'the-german.com', 'the-gremlin.com', 'the-hooligan.com', 'the-italian.com', 'the-japanese.com', 'the-lair.com', 'the-madman.com', 'the-mailinglist.com', 'the-marine.com', 'the-master.com', 'the-mexican.com', 'the-ministry.com', 'the-monkey.com', 'the-newsletter.net', 'the-pentagon.com', 'the-police.com', 'the-prayer.com', 'the-professional.com', 'the-quickest.com', 'the-russian.com', 'the-seasiders.com', 'the-snake.com', 'the-spaceman.com', 'the-stock-market.com', 'the-student.net', 'the-whitehouse.net', 'the-wild-west.com', 'the18th.com', 
'thecoolguy.com', 'thecriminals.com', 'thedoghousemail.com', 'thedorm.com', 'theend.hu', 'theglobe.com', 'thegolfcourse.com', 'thegooner.com', 'theheadoffice.com', 'theinternetemail.com', 'thelanddownunder.com', 'thelimestones.com', 'themail.com', 'themillionare.net', 'theoffice.net', 'theplate.com', 'thepokerface.com', 'thepostmaster.net', 'theraces.com', 'theracetrack.com', 'therapist.net', 'thereisnogod.com', 'thesimpsonsfans.com', 'thestreetfighter.com', 'theteebox.com', 'thewatercooler.com', 'thewebpros.co.uk', 'thewizzard.com', 'thewizzkid.com', 'thexyz.ca', 'thexyz.cn', 'thexyz.com', 'thexyz.es', 'thexyz.fr', 'thexyz.in', 'thexyz.mobi', 'thexyz.net', 'thexyz.org', 'thezhangs.net', 'thirdage.com', 'thisgirl.com', 'thisisnotmyrealemail.com', 'thismail.net', 'thoic.com', 'thraml.com', 'thrott.com', 'throwam.com', 'throwawayemailaddress.com', 'thundermail.com', 'tibetemail.com', 'tidni.com', 'tilien.com', 'timein.net', 'timormail.com', 'tin.it', 'tipsandadvice.com', 'tiran.ru', 'tiscali.at', 'tiscali.be', 'tiscali.co.uk', 'tiscali.it', 'tiscali.lu', 'tiscali.se', 'tittbit.in', 'tizi.com', 'tkcity.com', 'tlcfan.com', 'tmail.ws', 'tmailinator.com', 'tmicha.net', 'toast.com', 'toke.com', 'tokyo.com', 'tom.com', 'toolsource.com', 'toomail.biz', 'toothfairy.com', 'topchat.com', 'topgamers.co.uk', 'topletter.com', 'topmail-files.de', 'topmail.com.ar', 'topranklist.de', 'topsurf.com', 'topteam.bg', 'toquedequeda.com', 'torba.com', 'torchmail.com', 'torontomail.com', 'tortenboxer.de', 'tot.net', 'totalmail.com', 'totalmail.de', 'totalmusic.net', 'totalsurf.com', 'toughguy.net', 'townisp.com', 'tpg.com.au', 'tradermail.info', 'trainspottingfan.com', 'trash-amil.com', 'trash-mail.at', 'trash-mail.com', 'trash-mail.de', 'trash-mail.ga', 'trash-mail.ml', 'trash2009.com', 'trash2010.com', 'trash2011.com', 'trashdevil.com', 'trashdevil.de', 'trashemail.de', 'trashmail.at', 'trashmail.com', 'trashmail.de', 'trashmail.me', 'trashmail.net', 'trashmail.org', 'trashmailer.com', 
'trashymail.com', 'trashymail.net', 'travel.li', 'trayna.com', 'trbvm.com', 'trbvn.com', 'trevas.net', 'trialbytrivia.com', 'trialmail.de', 'trickmail.net', 'trillianpro.com', 'trimix.cn', 'tritium.net', 'trjam.net', 'trmailbox.com', 'tropicalstorm.com', 'truckeremail.net', 'truckers.com', 'truckerz.com', 'truckracer.com', 'truckracers.com', 'trust-me.com', 'truth247.com', 'truthmail.com', 'tsamail.co.za', 'ttml.co.in', 'tulipsmail.net', 'tunisiamail.com', 'turboprinz.de', 'turboprinzessin.de', 'turkey.com', 'turual.com', 'tushino.net', 'tut.by', 'tvcablenet.be', 'tverskie.net', 'tverskoe.net', 'tvnet.lv', 'tvstar.com', 'twc.com', 'twcny.com', 'twentylove.com', 'twinmail.de', 'twinstarsmail.com', 'tx.rr.com', 'tycoonmail.com', 'tyldd.com', 'typemail.com', 'tyt.by', 'u14269.ml', 'u2club.com', 'ua.fm', 'uae.ac', 'uaemail.com', 'ubbi.com', 'ubbi.com.br', 'uboot.com', 'uggsrock.com', 'uk2.net', 'uk2k.com', 'uk2net.com', 'uk7.net', 'uk8.net', 'ukbuilder.com', 'ukcool.com', 'ukdreamcast.com', 'ukmail.org', 'ukmax.com', 'ukr.net', 'ukrpost.net', 'ukrtop.com', 'uku.co.uk', 'ultapulta.com', 'ultimatelimos.com', 'ultra.fyi', 'ultrapostman.com', 'umail.net', 'ummah.org', 'umpire.com', 'unbounded.com', 'underwriters.com', 'unforgettable.com', 'uni.de', 'uni.de.de', 'uni.demailto.de', 'unican.es', 'unihome.com', 'unitybox.de', 'universal.pt', 'uno.ee', 'uno.it', 'unofree.it', 'unomail.com', 'unterderbruecke.de', 'uogtritons.com', 'uol.com.ar', 'uol.com.br', 'uol.com.co', 'uol.com.mx', 'uol.com.ve', 'uole.com', 'uole.com.ve', 'uolmail.com', 'uomail.com', 'upc.nl', 'upcmail.nl', 'upf.org', 'upliftnow.com', 'uplipht.com', 'uraniomail.com', 'ureach.com', 'urgentmail.biz', 'urhen.com', 'uroid.com', 'us.af', 'usa.com', 'usa.net', 'usaaccess.net', 'usanetmail.com', 'used-product.fr', 'userbeam.com', 'usermail.com', 'username.e4ward.com', 'userzap.com', 'usit.net', 'usma.net', 'usmc.net', 'uswestmail.net', 'uu.net', 'uymail.com', 'uyuyuy.com', 'uzhe.net', 'v-sexi.com', 'v8email.com', 
'vaasfc4.tk', 'vahoo.com', 'valemail.net', 'valudeal.net', 'vampirehunter.com', 'varbizmail.com', 'vcmail.com', 'velnet.co.uk', 'velnet.com', 'velocall.com', 'veloxmail.com.br', 'venompen.com', 'verizon.net', 'verizonmail.com', 'verlass-mich-nicht.de', 'versatel.nl', 'verticalheaven.com', 'veryfast.biz', 'veryrealemail.com', 'veryspeedy.net', 'vfemail.net', 'vickaentb.tk', 'videotron.ca', 'viditag.com', 'viewcastmedia.com', 'viewcastmedia.net', 'vinbazar.com', 'violinmakers.co.uk', 'vip.126.com', 'vip.21cn.com', 'vip.citiz.net', 'vip.gr', 'vip.onet.pl', 'vip.qq.com', 'vip.sina.com', 'vipmail.ru', 'viralplays.com', 'virgilio.it', 'virgin.net', 'virginbroadband.com.au', 'virginmedia.com', 'virtual-mail.com', 'virtualactive.com', 'virtualguam.com', 'virtualmail.com', 'visitmail.com', 'visitweb.com', 'visto.com', 'visualcities.com', 'vivavelocity.com', 'vivianhsu.net', 'viwanet.ru', 'vjmail.com', 'vjtimail.com', 'vkcode.ru', 'vlcity.ru', 'vlmail.com', 'vnet.citiz.net', 'vnn.vn', 'vnukovo.net', 'vodafone.nl', 'vodafonethuis.nl', 'voila.fr', 'volcanomail.com', 'vollbio.de', 'volloeko.de', 'vomoto.com', 'voo.be', 'vorsicht-bissig.de', 'vorsicht-scharf.de', 'vote-democrats.com', 'vote-hillary.com', 'vote-republicans.com', 'vote4gop.org', 'votenet.com', 'vovan.ru', 'vp.pl', 'vpn.st', 'vr9.com', 'vsimcard.com', 'vubby.com', 'vyhino.net', 'w3.to', 'wahoye.com', 'walala.org', 'wales2000.net', 'walkmail.net', 'walkmail.ru', 'walla.co.il', 'wam.co.za', 'wanaboo.com', 'wanadoo.co.uk', 'wanadoo.es', 'wanadoo.fr', 'wapda.com', 'war-im-urlaub.de', 'warmmail.com', 'warpmail.net', 'warrior.hu', 'wasteland.rfc822.org', 'watchmail.com', 'waumail.com', 'wazabi.club', 'wbdet.com', 'wearab.net', 'web-contact.info', 'web-emailbox.eu', 'web-ideal.fr', 'web-mail.com.ar', 'web-mail.pp.ua', 'web-police.com', 'web.de', 'webaddressbook.com', 'webadicta.org', 'webave.com', 'webbworks.com', 'webcammail.com', 'webcity.ca', 'webcontact-france.eu', 'webdream.com', 'webemail.me', 'webemaillist.com', 
'webinbox.com', 'webindia123.com', 'webjump.com', 'webm4il.info', 'webmail.bellsouth.net', 'webmail.blue', 'webmail.co.yu', 'webmail.co.za', 'webmail.fish', 'webmail.hu', 'webmail.lawyer', 'webmail.ru', 'webmail.wiki', 'webmails.com', 'webmailv.com', 'webname.com', 'webprogramming.com', 'webskulker.com', 'webstable.net', 'webstation.com', 'websurfer.co.za', 'webtopmail.com', 'webtribe.net', 'webuser.in', 'wee.my', 'weedmail.com', 'weekmail.com', 'weekonline.com', 'wefjo.grn.cc', 'weg-werf-email.de', 'wegas.ru', 'wegwerf-emails.de', 'wegwerfadresse.de', 'wegwerfemail.com', 'wegwerfemail.de', 'wegwerfmail.de', 'wegwerfmail.info', 'wegwerfmail.net', 'wegwerfmail.org', 'wegwerpmailadres.nl', 'wehshee.com', 'weibsvolk.de', 'weibsvolk.org', 'weinenvorglueck.de', 'welsh-lady.com', 'wesleymail.com', 'westnet.com', 'westnet.com.au', 'wetrainbayarea.com', 'wfgdfhj.tk', 'wh4f.org', 'whale-mail.com', 'whartontx.com', 'whatiaas.com', 'whatpaas.com', 'wheelweb.com', 'whipmail.com', 'whoever.com', 'wholefitness.com', 'whoopymail.com', 'whtjddn.33mail.com', 'whyspam.me', 'wi.rr.com', 'wi.twcbc.com', 'wickedmail.com', 'wickmail.net', 'wideopenwest.com', 'wildmail.com', 'wilemail.com', 'will-hier-weg.de', 'willhackforfood.biz', 'willselfdestruct.com', 'windowslive.com', 'windrivers.net', 'windstream.com', 'windstream.net', 'winemaven.info', 'wingnutz.com', 'winmail.com.au', 'winning.com', 'winrz.com', 'wir-haben-nachwuchs.de', 'wir-sind-cool.org', 'wirsindcool.de', 'witty.com', 'wiz.cc', 'wkbwmail.com', 'wmail.cf', 'wo.com.cn', 'woh.rr.com', 'wolf-web.com', 'wolke7.net', 'wollan.info', 'wombles.com', 'women-at-work.org', 'women-only.net', 'wonder-net.com', 'wongfaye.com', 'wooow.it', 'work4teens.com', 'worker.com', 'workmail.co.za', 'workmail.com', 'worldbreak.com', 'worldemail.com', 'worldmailer.com', 'worldnet.att.net', 'wormseo.cn', 'wosaddict.com', 'wouldilie.com', 'wovz.cu.cc', 'wow.com', 'wowgirl.com', 'wowmail.com', 'wowway.com', 'wp.pl', 'wptamail.com', 'wrestlingpages.com', 
'wrexham.net', 'writeme.com', 'writemeback.com', 'writeremail.com', 'wronghead.com', 'wrongmail.com', 'wtvhmail.com', 'wwdg.com', 'www.com', 'www.e4ward.com', 'www.mailinator.com', 'www2000.net', 'wwwnew.eu', 'wx88.net', 'wxs.net', 'wyrm.supernews.com', 'x-mail.net', 'x-networks.net', 'x.ip6.li', 'x5g.com', 'xagloo.com', 'xaker.ru', 'xd.ae', 'xemaps.com', 'xents.com', 'xing886.uu.gl', 'xmail.com', 'xmaily.com', 'xmastime.com', 'xmenfans.com', 'xms.nl', 'xmsg.com', 'xoom.com', 'xoommail.com', 'xoxox.cc', 'xoxy.net', 'xpectmore.com', 'xpressmail.zzn.com', 'xs4all.nl', 'xsecurity.org', 'xsmail.com', 'xtra.co.nz', 'xtram.com', 'xuno.com', 'xww.ro', 'xy9ce.tk', 'xyz.am', 'xyzfree.net', 'xzapmail.com', 'y7mail.com', 'ya.ru', 'yada-yada.com', 'yaho.com', 'yahoo.ae', 'yahoo.at', 'yahoo.be', 'yahoo.ca', 'yahoo.ch', 'yahoo.cn', 'yahoo.co', 'yahoo.co.id', 'yahoo.co.il', 'yahoo.co.in', 'yahoo.co.jp', 'yahoo.co.kr', 'yahoo.co.nz', 'yahoo.co.th', 'yahoo.co.uk', 'yahoo.co.za', 'yahoo.com', 'yahoo.com.ar', 'yahoo.com.au', 'yahoo.com.br', 'yahoo.com.cn', 'yahoo.com.co', 'yahoo.com.hk', 'yahoo.com.is', 'yahoo.com.mx', 'yahoo.com.my', 'yahoo.com.ph', 'yahoo.com.ru', 'yahoo.com.sg', 'yahoo.com.tr', 'yahoo.com.tw', 'yahoo.com.vn', 'yahoo.cz', 'yahoo.de', 'yahoo.dk', 'yahoo.es', 'yahoo.fi', 'yahoo.fr', 'yahoo.gr', 'yahoo.hu', 'yahoo.ie', 'yahoo.in', 'yahoo.it', 'yahoo.jp', 'yahoo.net', 'yahoo.nl', 'yahoo.no', 'yahoo.pl', 'yahoo.pt', 'yahoo.ro', 'yahoo.ru', 'yahoo.se', 'yahoofs.com', 'yahoomail.com', 'yalla.com', 'yalla.com.lb', 'yalook.com', 'yam.com', 'yandex.com', 'yandex.mail', 'yandex.pl', 'yandex.ru', 'yandex.ua', 'yapost.com', 'yapped.net', 'yawmail.com', 'yclub.com', 'yeah.net', 'yebox.com', 'yeehaa.com', 'yehaa.com', 'yehey.com', 'yemenmail.com', 'yep.it', 'yepmail.net', 'yert.ye.vc', 'yesbox.net', 'yesey.net', 'yeswebmaster.com', 'ygm.com', 'yifan.net', 'ymail.com', 'ynnmail.com', 'yogamaven.com', 'yogotemail.com', 'yomail.info', 'yopmail.com', 'yopmail.fr', 'yopmail.net', 
'yopmail.org', 'yopmail.pp.ua', 'yopolis.com', 'yopweb.com', 'youareadork.com', 'youmailr.com', 'youpy.com', 'your-house.com', 'your-mail.com', 'yourdomain.com', 'yourinbox.com', 'yourlifesucks.cu.cc', 'yourlover.net', 'yourname.ddns.org', 'yourname.freeservers.com', 'yournightmare.com', 'yours.com', 'yourssincerely.com', 'yoursubdomain.findhere.com', 'yoursubdomain.zzn.com', 'yourteacher.net', 'yourwap.com', 'youthfire.com', 'youthpost.com', 'youvegotmail.net', 'yuuhuu.net', 'yuurok.com', 'yyhmail.com', 'z1p.biz', 'z6.com', 'z9mail.com', 'za.com', 'zahadum.com', 'zaktouni.fr', 'zcities.com', 'zdnetmail.com', 'zdorovja.net', 'zeeks.com', 'zeepost.nl', 'zehnminuten.de', 'zehnminutenmail.de', 'zensearch.com', 'zensearch.net', 'zerocrime.org', 'zetmail.com', 'zhaowei.net', 'zhouemail.510520.org', 'ziggo.nl', 'zing.vn', 'zionweb.org', 'zip.net', 'zipido.com', 'ziplink.net', 'ziplip.com', 'zipmail.com', 'zipmail.com.br', 'zipmax.com', 'zippymail.info', 'zmail.pt', 'zmail.ru', 'zoemail.com', 'zoemail.net', 'zoemail.org', 'zoho.com', 'zomg.info', 'zonai.com', 'zoneview.net', 'zonnet.nl', 'zooglemail.com', 'zoominternet.net', 'zubee.com', 'zuvio.com', 'zuzzurello.com', 'zvmail.com', 'zwallet.com', 'zweb.in', 'zxcv.com', 'zxcvbnm.com', 'zybermail.com', 'zydecofan.com', 'zzn.com', 'zzom.co.uk', 'zzz.com', 'zzz.pl']
}
| 17,767
| 104,882
| 0.692942
| 13,649
| 106,602
| 5.412045
| 0.428603
| 0.003452
| 0.001462
| 0.000542
| 0.00019
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019736
| 0.059849
| 106,602
| 5
| 104,883
| 21,320.4
| 0.717318
| 0
| 0
| 0
| 0
| 0
| 0.761363
| 0.025686
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.25
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
60c21279d9008e76490e6e639b81571299cc4d82
| 99
|
py
|
Python
|
minidump/streams/TokenStream.py
|
mrexodia/minidump
|
4945b1011ac202c58003b0198820bc8521eb5af5
|
[
"MIT"
] | 215
|
2021-11-20T22:39:33.000Z
|
2022-03-26T01:33:36.000Z
|
minidump/streams/TokenStream.py
|
mrexodia/minidump
|
4945b1011ac202c58003b0198820bc8521eb5af5
|
[
"MIT"
] | 24
|
2019-04-15T17:50:15.000Z
|
2022-03-30T11:50:26.000Z
|
minidump/streams/TokenStream.py
|
mrexodia/minidump
|
4945b1011ac202c58003b0198820bc8521eb5af5
|
[
"MIT"
] | 36
|
2018-05-26T21:10:52.000Z
|
2021-11-20T14:26:58.000Z
|
#!/usr/bin/env python3
#
# Author:
# Tamas Jos (@skelsec)
#
# TODO: implement this, no MSDN aricle
| 16.5
| 38
| 0.666667
| 14
| 99
| 4.714286
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012195
| 0.171717
| 99
| 6
| 38
| 16.5
| 0.792683
| 0.888889
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0.166667
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
60c223fd8d8e1cc270d1f6ebcf67f45fcf672e37
| 51
|
py
|
Python
|
pip_conda_demo/hello_world/__init__.py
|
benthayer/pip_conda_demo
|
fe230cd8abc720dfe5b67e3048d1c315b55f905b
|
[
"MIT"
] | 10
|
2019-06-10T07:20:06.000Z
|
2021-12-15T13:28:43.000Z
|
pip_conda_demo/hello_world/__init__.py
|
benthayer/pip_conda_demo
|
fe230cd8abc720dfe5b67e3048d1c315b55f905b
|
[
"MIT"
] | 2
|
2019-07-10T19:24:36.000Z
|
2020-10-06T20:12:40.000Z
|
pip_conda_demo/hello_world/__init__.py
|
benthayer/pip_conda_demo
|
fe230cd8abc720dfe5b67e3048d1c315b55f905b
|
[
"MIT"
] | 4
|
2020-09-03T19:04:54.000Z
|
2021-12-15T13:28:53.000Z
|
from pip_conda_demo.hello_world import hello_world
| 25.5
| 50
| 0.901961
| 9
| 51
| 4.666667
| 0.777778
| 0.47619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078431
| 51
| 1
| 51
| 51
| 0.893617
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
60d9d8e681dd1cd26b67776b7bc47f2e8854bf3f
| 79
|
py
|
Python
|
dragontail/content/models/__init__.py
|
tracon/dragontail
|
aae860acb5fe400015557f659b6d4221b939747a
|
[
"MIT"
] | null | null | null |
dragontail/content/models/__init__.py
|
tracon/dragontail
|
aae860acb5fe400015557f659b6d4221b939747a
|
[
"MIT"
] | null | null | null |
dragontail/content/models/__init__.py
|
tracon/dragontail
|
aae860acb5fe400015557f659b6d4221b939747a
|
[
"MIT"
] | null | null | null |
from .basicpage import BasicPage
from .templatesettings import TemplateSettings
| 39.5
| 46
| 0.886076
| 8
| 79
| 8.75
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088608
| 79
| 2
| 46
| 39.5
| 0.972222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
71534238bafc6a944c7263732bf0c570e2128eab
| 190,913
|
py
|
Python
|
sdk/python/pulumi_azure_nextgen/insights/outputs.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 31
|
2020-09-21T09:41:01.000Z
|
2021-02-26T13:21:59.000Z
|
sdk/python/pulumi_azure_nextgen/insights/outputs.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 231
|
2020-09-21T09:38:45.000Z
|
2021-03-01T11:16:03.000Z
|
sdk/python/pulumi_azure_nextgen/insights/outputs.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 4
|
2020-09-29T14:14:59.000Z
|
2021-02-10T20:38:16.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
from . import outputs
from ._enums import *
__all__ = [
'ActionGroupResponse',
'ActionListResponse',
'AlertRuleAllOfConditionResponse',
'AlertRuleAnyOfOrLeafConditionResponse',
'AlertRuleLeafConditionResponse',
'AlertingActionResponse',
'ApplicationInsightsComponentAnalyticsItemPropertiesResponse',
'ApplicationInsightsComponentDataVolumeCapResponse',
'ArmRoleReceiverResponse',
'AutomationRunbookReceiverResponse',
'AutoscaleNotificationResponse',
'AutoscaleProfileResponse',
'AzNsActionGroupResponse',
'AzureAppPushReceiverResponse',
'AzureFunctionReceiverResponse',
'CriteriaResponse',
'DataCollectionRuleResponseDataSources',
'DataCollectionRuleResponseDestinations',
'DataFlowResponse',
'DataSourceConfigurationResponse',
'DataSourceResponse',
'DestinationsSpecResponseAzureMonitorMetrics',
'DimensionResponse',
'DynamicMetricCriteriaResponse',
'DynamicThresholdFailingPeriodsResponse',
'EmailNotificationResponse',
'EmailReceiverResponse',
'EtwEventConfigurationResponse',
'EtwProviderConfigurationResponse',
'EventLogConfigurationResponse',
'ExtensionDataSourceResponse',
'ItsmReceiverResponse',
'LocationThresholdRuleConditionResponse',
'LogAnalyticsDestinationResponse',
'LogMetricTriggerResponse',
'LogSettingsResponse',
'LogToMetricActionResponse',
'LogicAppReceiverResponse',
'ManagedIdentityResponse',
'ManagementEventAggregationConditionResponse',
'ManagementEventRuleConditionResponse',
'MetricAlertActionResponse',
'MetricAlertMultipleResourceMultipleMetricCriteriaResponse',
'MetricAlertSingleResourceMultipleMetricCriteriaResponse',
'MetricCriteriaResponse',
'MetricDimensionResponse',
'MetricSettingsResponse',
'MetricTriggerResponse',
'PerfCounterDataSourceResponse',
'PerformanceCounterConfigurationResponse',
'PrivateEndpointConnectionResponse',
'PrivateEndpointPropertyResponse',
'PrivateLinkScopedResourceResponse',
'PrivateLinkServiceConnectionStatePropertyResponse',
'RecurrenceResponse',
'RecurrentScheduleResponse',
'RetentionPolicyResponse',
'RuleEmailActionResponse',
'RuleManagementEventClaimsDataSourceResponse',
'RuleManagementEventDataSourceResponse',
'RuleMetricDataSourceResponse',
'RuleWebhookActionResponse',
'ScaleActionResponse',
'ScaleCapacityResponse',
'ScaleRuleMetricDimensionResponse',
'ScaleRuleResponse',
'ScheduleResponse',
'SinkConfigurationResponse',
'SmsReceiverResponse',
'SourceResponse',
'SubscriptionLogSettingsResponse',
'SyslogDataSourceResponse',
'ThresholdRuleConditionResponse',
'TimeWindowResponse',
'TriggerConditionResponse',
'UserAssignedIdentitiesResponse',
'VoiceReceiverResponse',
'WebTestGeolocationResponse',
'WebTestPropertiesResponseConfiguration',
'WebhookNotificationResponse',
'WebhookReceiverResponse',
'WebtestLocationAvailabilityCriteriaResponse',
'WindowsEventLogDataSourceResponse',
'WorkbookTemplateGalleryResponse',
'WorkbookTemplateLocalizedGalleryResponse',
]
@pulumi.output_type
class ActionGroupResponse(dict):
"""
A pointer to an Azure Action Group.
"""
def __init__(__self__, *,
action_group_id: str,
webhook_properties: Optional[Mapping[str, str]] = None):
"""
A pointer to an Azure Action Group.
:param str action_group_id: The resource ID of the Action Group. This cannot be null or empty.
:param Mapping[str, str] webhook_properties: the dictionary of custom properties to include with the post operation. These data are appended to the webhook payload.
"""
pulumi.set(__self__, "action_group_id", action_group_id)
if webhook_properties is not None:
pulumi.set(__self__, "webhook_properties", webhook_properties)
@property
@pulumi.getter(name="actionGroupId")
def action_group_id(self) -> str:
"""
The resource ID of the Action Group. This cannot be null or empty.
"""
return pulumi.get(self, "action_group_id")
@property
@pulumi.getter(name="webhookProperties")
def webhook_properties(self) -> Optional[Mapping[str, str]]:
"""
the dictionary of custom properties to include with the post operation. These data are appended to the webhook payload.
"""
return pulumi.get(self, "webhook_properties")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ActionListResponse(dict):
"""
A list of Activity Log Alert rule actions.
"""
def __init__(__self__, *,
action_groups: Optional[Sequence['outputs.ActionGroupResponse']] = None):
"""
A list of Activity Log Alert rule actions.
:param Sequence['ActionGroupResponseArgs'] action_groups: The list of the Action Groups.
"""
if action_groups is not None:
pulumi.set(__self__, "action_groups", action_groups)
@property
@pulumi.getter(name="actionGroups")
def action_groups(self) -> Optional[Sequence['outputs.ActionGroupResponse']]:
"""
The list of the Action Groups.
"""
return pulumi.get(self, "action_groups")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AlertRuleAllOfConditionResponse(dict):
"""
An Activity Log Alert rule condition that is met when all its member conditions are met.
"""
def __init__(__self__, *,
all_of: Sequence['outputs.AlertRuleAnyOfOrLeafConditionResponse']):
"""
An Activity Log Alert rule condition that is met when all its member conditions are met.
:param Sequence['AlertRuleAnyOfOrLeafConditionResponseArgs'] all_of: The list of Activity Log Alert rule conditions.
"""
pulumi.set(__self__, "all_of", all_of)
@property
@pulumi.getter(name="allOf")
def all_of(self) -> Sequence['outputs.AlertRuleAnyOfOrLeafConditionResponse']:
"""
The list of Activity Log Alert rule conditions.
"""
return pulumi.get(self, "all_of")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AlertRuleAnyOfOrLeafConditionResponse(dict):
"""
An Activity Log Alert rule condition that is met when all its member conditions are met.
Each condition can be of one of the following types:
__Important__: Each type has its unique subset of properties. Properties from different types CANNOT exist in one condition.
* __Leaf Condition -__ must contain 'field' and either 'equals' or 'containsAny'.
_Please note, 'anyOf' should __not__ be set in a Leaf Condition._
* __AnyOf Condition -__ must contain __only__ 'anyOf' (which is an array of Leaf Conditions).
_Please note, 'field', 'equals' and 'containsAny' should __not__ be set in an AnyOf Condition._
"""
def __init__(__self__, *,
any_of: Optional[Sequence['outputs.AlertRuleLeafConditionResponse']] = None,
contains_any: Optional[Sequence[str]] = None,
equals: Optional[str] = None,
field: Optional[str] = None):
"""
An Activity Log Alert rule condition that is met when all its member conditions are met.
Each condition can be of one of the following types:
__Important__: Each type has its unique subset of properties. Properties from different types CANNOT exist in one condition.
* __Leaf Condition -__ must contain 'field' and either 'equals' or 'containsAny'.
_Please note, 'anyOf' should __not__ be set in a Leaf Condition._
* __AnyOf Condition -__ must contain __only__ 'anyOf' (which is an array of Leaf Conditions).
_Please note, 'field', 'equals' and 'containsAny' should __not__ be set in an AnyOf Condition._
:param Sequence['AlertRuleLeafConditionResponseArgs'] any_of: An Activity Log Alert rule condition that is met when at least one of its member leaf conditions are met.
:param Sequence[str] contains_any: The value of the event's field will be compared to the values in this array (case-insensitive) to determine if the condition is met.
:param str equals: The value of the event's field will be compared to this value (case-insensitive) to determine if the condition is met.
:param str field: The name of the Activity Log event's field that this condition will examine.
The possible values for this field are (case-insensitive): 'resourceId', 'category', 'caller', 'level', 'operationName', 'resourceGroup', 'resourceProvider', 'status', 'subStatus', 'resourceType', or anything beginning with 'properties'.
"""
if any_of is not None:
pulumi.set(__self__, "any_of", any_of)
if contains_any is not None:
pulumi.set(__self__, "contains_any", contains_any)
if equals is not None:
pulumi.set(__self__, "equals", equals)
if field is not None:
pulumi.set(__self__, "field", field)
@property
@pulumi.getter(name="anyOf")
def any_of(self) -> Optional[Sequence['outputs.AlertRuleLeafConditionResponse']]:
"""
An Activity Log Alert rule condition that is met when at least one of its member leaf conditions are met.
"""
return pulumi.get(self, "any_of")
@property
@pulumi.getter(name="containsAny")
def contains_any(self) -> Optional[Sequence[str]]:
"""
The value of the event's field will be compared to the values in this array (case-insensitive) to determine if the condition is met.
"""
return pulumi.get(self, "contains_any")
@property
@pulumi.getter
def equals(self) -> Optional[str]:
"""
The value of the event's field will be compared to this value (case-insensitive) to determine if the condition is met.
"""
return pulumi.get(self, "equals")
@property
@pulumi.getter
def field(self) -> Optional[str]:
"""
The name of the Activity Log event's field that this condition will examine.
The possible values for this field are (case-insensitive): 'resourceId', 'category', 'caller', 'level', 'operationName', 'resourceGroup', 'resourceProvider', 'status', 'subStatus', 'resourceType', or anything beginning with 'properties'.
"""
return pulumi.get(self, "field")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AlertRuleLeafConditionResponse(dict):
"""
An Activity Log Alert rule condition that is met by comparing the field and value of an Activity Log event.
This condition must contain 'field' and either 'equals' or 'containsAny'.
"""
def __init__(__self__, *,
contains_any: Optional[Sequence[str]] = None,
equals: Optional[str] = None,
field: Optional[str] = None):
"""
An Activity Log Alert rule condition that is met by comparing the field and value of an Activity Log event.
This condition must contain 'field' and either 'equals' or 'containsAny'.
:param Sequence[str] contains_any: The value of the event's field will be compared to the values in this array (case-insensitive) to determine if the condition is met.
:param str equals: The value of the event's field will be compared to this value (case-insensitive) to determine if the condition is met.
:param str field: The name of the Activity Log event's field that this condition will examine.
The possible values for this field are (case-insensitive): 'resourceId', 'category', 'caller', 'level', 'operationName', 'resourceGroup', 'resourceProvider', 'status', 'subStatus', 'resourceType', or anything beginning with 'properties'.
"""
if contains_any is not None:
pulumi.set(__self__, "contains_any", contains_any)
if equals is not None:
pulumi.set(__self__, "equals", equals)
if field is not None:
pulumi.set(__self__, "field", field)
@property
@pulumi.getter(name="containsAny")
def contains_any(self) -> Optional[Sequence[str]]:
"""
The value of the event's field will be compared to the values in this array (case-insensitive) to determine if the condition is met.
"""
return pulumi.get(self, "contains_any")
@property
@pulumi.getter
def equals(self) -> Optional[str]:
"""
The value of the event's field will be compared to this value (case-insensitive) to determine if the condition is met.
"""
return pulumi.get(self, "equals")
@property
@pulumi.getter
def field(self) -> Optional[str]:
"""
The name of the Activity Log event's field that this condition will examine.
The possible values for this field are (case-insensitive): 'resourceId', 'category', 'caller', 'level', 'operationName', 'resourceGroup', 'resourceProvider', 'status', 'subStatus', 'resourceType', or anything beginning with 'properties'.
"""
return pulumi.get(self, "field")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AlertingActionResponse(dict):
"""
Specify action need to be taken when rule type is Alert
"""
def __init__(__self__, *,
odata_type: str,
severity: str,
trigger: 'outputs.TriggerConditionResponse',
azns_action: Optional['outputs.AzNsActionGroupResponse'] = None,
throttling_in_min: Optional[int] = None):
"""
Specify action need to be taken when rule type is Alert
:param str odata_type: Specifies the action. Supported values - AlertingAction, LogToMetricAction
Expected value is 'Microsoft.WindowsAzure.Management.Monitoring.Alerts.Models.Microsoft.AppInsights.Nexus.DataContracts.Resources.ScheduledQueryRules.AlertingAction'.
:param str severity: Severity of the alert
:param 'TriggerConditionResponseArgs' trigger: The trigger condition that results in the alert rule being.
:param 'AzNsActionGroupResponseArgs' azns_action: Azure action group reference.
:param int throttling_in_min: time (in minutes) for which Alerts should be throttled or suppressed.
"""
pulumi.set(__self__, "odata_type", 'Microsoft.WindowsAzure.Management.Monitoring.Alerts.Models.Microsoft.AppInsights.Nexus.DataContracts.Resources.ScheduledQueryRules.AlertingAction')
pulumi.set(__self__, "severity", severity)
pulumi.set(__self__, "trigger", trigger)
if azns_action is not None:
pulumi.set(__self__, "azns_action", azns_action)
if throttling_in_min is not None:
pulumi.set(__self__, "throttling_in_min", throttling_in_min)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> str:
"""
Specifies the action. Supported values - AlertingAction, LogToMetricAction
Expected value is 'Microsoft.WindowsAzure.Management.Monitoring.Alerts.Models.Microsoft.AppInsights.Nexus.DataContracts.Resources.ScheduledQueryRules.AlertingAction'.
"""
return pulumi.get(self, "odata_type")
@property
@pulumi.getter
def severity(self) -> str:
"""
Severity of the alert
"""
return pulumi.get(self, "severity")
@property
@pulumi.getter
def trigger(self) -> 'outputs.TriggerConditionResponse':
"""
The trigger condition that results in the alert rule being.
"""
return pulumi.get(self, "trigger")
@property
@pulumi.getter(name="aznsAction")
def azns_action(self) -> Optional['outputs.AzNsActionGroupResponse']:
"""
Azure action group reference.
"""
return pulumi.get(self, "azns_action")
@property
@pulumi.getter(name="throttlingInMin")
def throttling_in_min(self) -> Optional[int]:
"""
time (in minutes) for which Alerts should be throttled or suppressed.
"""
return pulumi.get(self, "throttling_in_min")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ApplicationInsightsComponentAnalyticsItemPropertiesResponse(dict):
"""
A set of properties that can be defined in the context of a specific item type. Each type may have its own properties.
"""
def __init__(__self__, *,
function_alias: Optional[str] = None):
"""
A set of properties that can be defined in the context of a specific item type. Each type may have its own properties.
:param str function_alias: A function alias, used when the type of the item is Function
"""
if function_alias is not None:
pulumi.set(__self__, "function_alias", function_alias)
@property
@pulumi.getter(name="functionAlias")
def function_alias(self) -> Optional[str]:
"""
A function alias, used when the type of the item is Function
"""
return pulumi.get(self, "function_alias")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ApplicationInsightsComponentDataVolumeCapResponse(dict):
"""
An Application Insights component daily data volume cap
"""
def __init__(__self__, *,
max_history_cap: float,
reset_time: int,
cap: Optional[float] = None,
stop_send_notification_when_hit_cap: Optional[bool] = None,
stop_send_notification_when_hit_threshold: Optional[bool] = None,
warning_threshold: Optional[int] = None):
"""
An Application Insights component daily data volume cap
:param float max_history_cap: Maximum daily data volume cap that the user can set for this component.
:param int reset_time: Daily data volume cap UTC reset hour.
:param float cap: Daily data volume cap in GB.
:param bool stop_send_notification_when_hit_cap: Do not send a notification email when the daily data volume cap is met.
:param bool stop_send_notification_when_hit_threshold: Reserved, not used for now.
:param int warning_threshold: Reserved, not used for now.
"""
pulumi.set(__self__, "max_history_cap", max_history_cap)
pulumi.set(__self__, "reset_time", reset_time)
if cap is not None:
pulumi.set(__self__, "cap", cap)
if stop_send_notification_when_hit_cap is not None:
pulumi.set(__self__, "stop_send_notification_when_hit_cap", stop_send_notification_when_hit_cap)
if stop_send_notification_when_hit_threshold is not None:
pulumi.set(__self__, "stop_send_notification_when_hit_threshold", stop_send_notification_when_hit_threshold)
if warning_threshold is not None:
pulumi.set(__self__, "warning_threshold", warning_threshold)
@property
@pulumi.getter(name="maxHistoryCap")
def max_history_cap(self) -> float:
"""
Maximum daily data volume cap that the user can set for this component.
"""
return pulumi.get(self, "max_history_cap")
@property
@pulumi.getter(name="resetTime")
def reset_time(self) -> int:
"""
Daily data volume cap UTC reset hour.
"""
return pulumi.get(self, "reset_time")
@property
@pulumi.getter
def cap(self) -> Optional[float]:
"""
Daily data volume cap in GB.
"""
return pulumi.get(self, "cap")
@property
@pulumi.getter(name="stopSendNotificationWhenHitCap")
def stop_send_notification_when_hit_cap(self) -> Optional[bool]:
"""
Do not send a notification email when the daily data volume cap is met.
"""
return pulumi.get(self, "stop_send_notification_when_hit_cap")
@property
@pulumi.getter(name="stopSendNotificationWhenHitThreshold")
def stop_send_notification_when_hit_threshold(self) -> Optional[bool]:
"""
Reserved, not used for now.
"""
return pulumi.get(self, "stop_send_notification_when_hit_threshold")
@property
@pulumi.getter(name="warningThreshold")
def warning_threshold(self) -> Optional[int]:
"""
Reserved, not used for now.
"""
return pulumi.get(self, "warning_threshold")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ArmRoleReceiverResponse(dict):
"""
An arm role receiver.
"""
def __init__(__self__, *,
name: str,
role_id: str,
use_common_alert_schema: bool):
"""
An arm role receiver.
:param str name: The name of the arm role receiver. Names must be unique across all receivers within an action group.
:param str role_id: The arm role id.
:param bool use_common_alert_schema: Indicates whether to use common alert schema.
"""
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "role_id", role_id)
pulumi.set(__self__, "use_common_alert_schema", use_common_alert_schema)
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the arm role receiver. Names must be unique across all receivers within an action group.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="roleId")
def role_id(self) -> str:
"""
The arm role id.
"""
return pulumi.get(self, "role_id")
@property
@pulumi.getter(name="useCommonAlertSchema")
def use_common_alert_schema(self) -> bool:
"""
Indicates whether to use common alert schema.
"""
return pulumi.get(self, "use_common_alert_schema")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AutomationRunbookReceiverResponse(dict):
    """
    The Azure Automation Runbook notification receiver.
    """
    def __init__(self, *,
                 automation_account_id: str,
                 is_global_runbook: bool,
                 runbook_name: str,
                 use_common_alert_schema: bool,
                 webhook_resource_id: str,
                 name: Optional[str] = None,
                 service_uri: Optional[str] = None):
        """
        The Azure Automation Runbook notification receiver.

        :param str automation_account_id: The Azure automation account Id which holds this runbook and authenticate to Azure resource.
        :param bool is_global_runbook: Indicates whether this instance is global runbook.
        :param str runbook_name: The name for this runbook.
        :param bool use_common_alert_schema: Indicates whether to use common alert schema.
        :param str webhook_resource_id: The resource id for webhook linked to this runbook.
        :param str name: Indicates name of the webhook.
        :param str service_uri: The URI where webhooks should be sent.
        """
        # Required members are always stored.
        required = (
            ("automation_account_id", automation_account_id),
            ("is_global_runbook", is_global_runbook),
            ("runbook_name", runbook_name),
            ("use_common_alert_schema", use_common_alert_schema),
            ("webhook_resource_id", webhook_resource_id),
        )
        for key, value in required:
            pulumi.set(self, key, value)
        # Optional members are stored only when explicitly provided.
        for key, value in (("name", name), ("service_uri", service_uri)):
            if value is not None:
                pulumi.set(self, key, value)

    @property
    @pulumi.getter(name="automationAccountId")
    def automation_account_id(self) -> str:
        """The Azure automation account Id which holds this runbook and authenticate to Azure resource."""
        return pulumi.get(self, "automation_account_id")

    @property
    @pulumi.getter(name="isGlobalRunbook")
    def is_global_runbook(self) -> bool:
        """Indicates whether this instance is global runbook."""
        return pulumi.get(self, "is_global_runbook")

    @property
    @pulumi.getter(name="runbookName")
    def runbook_name(self) -> str:
        """The name for this runbook."""
        return pulumi.get(self, "runbook_name")

    @property
    @pulumi.getter(name="useCommonAlertSchema")
    def use_common_alert_schema(self) -> bool:
        """Indicates whether to use common alert schema."""
        return pulumi.get(self, "use_common_alert_schema")

    @property
    @pulumi.getter(name="webhookResourceId")
    def webhook_resource_id(self) -> str:
        """The resource id for webhook linked to this runbook."""
        return pulumi.get(self, "webhook_resource_id")

    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        """Indicates name of the webhook."""
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="serviceUri")
    def service_uri(self) -> Optional[str]:
        """The URI where webhooks should be sent."""
        return pulumi.get(self, "service_uri")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python name when known.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class AutoscaleNotificationResponse(dict):
    """
    Autoscale notification.
    """
    def __init__(self, *,
                 operation: str,
                 email: Optional['outputs.EmailNotificationResponse'] = None,
                 webhooks: Optional[Sequence['outputs.WebhookNotificationResponse']] = None):
        """
        Autoscale notification.

        :param str operation: the operation associated with the notification and its value must be "scale"
        :param 'EmailNotificationResponseArgs' email: the email notification.
        :param Sequence['WebhookNotificationResponseArgs'] webhooks: the collection of webhook notifications.
        """
        pulumi.set(self, "operation", operation)
        # Optional members are stored only when explicitly provided.
        for key, value in (("email", email), ("webhooks", webhooks)):
            if value is not None:
                pulumi.set(self, key, value)

    @property
    @pulumi.getter
    def operation(self) -> str:
        """
        the operation associated with the notification and its value must be "scale"
        """
        return pulumi.get(self, "operation")

    @property
    @pulumi.getter
    def email(self) -> Optional['outputs.EmailNotificationResponse']:
        """the email notification."""
        return pulumi.get(self, "email")

    @property
    @pulumi.getter
    def webhooks(self) -> Optional[Sequence['outputs.WebhookNotificationResponse']]:
        """the collection of webhook notifications."""
        return pulumi.get(self, "webhooks")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python name when known.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class AutoscaleProfileResponse(dict):
    """
    Autoscale profile.
    """
    def __init__(self, *,
                 capacity: 'outputs.ScaleCapacityResponse',
                 name: str,
                 rules: Sequence['outputs.ScaleRuleResponse'],
                 fixed_date: Optional['outputs.TimeWindowResponse'] = None,
                 recurrence: Optional['outputs.RecurrenceResponse'] = None):
        """
        Autoscale profile.

        :param 'ScaleCapacityResponseArgs' capacity: the number of instances that can be used during this profile.
        :param str name: the name of the profile.
        :param Sequence['ScaleRuleResponseArgs'] rules: the collection of rules that provide the triggers and parameters for the scaling action. A maximum of 10 rules can be specified.
        :param 'TimeWindowResponseArgs' fixed_date: the specific date-time for the profile. This element is not used if the Recurrence element is used.
        :param 'RecurrenceResponseArgs' recurrence: the repeating times at which this profile begins. This element is not used if the FixedDate element is used.
        """
        # Required members are always stored.
        for key, value in (("capacity", capacity), ("name", name), ("rules", rules)):
            pulumi.set(self, key, value)
        # Optional members are stored only when explicitly provided.
        for key, value in (("fixed_date", fixed_date), ("recurrence", recurrence)):
            if value is not None:
                pulumi.set(self, key, value)

    @property
    @pulumi.getter
    def capacity(self) -> 'outputs.ScaleCapacityResponse':
        """the number of instances that can be used during this profile."""
        return pulumi.get(self, "capacity")

    @property
    @pulumi.getter
    def name(self) -> str:
        """the name of the profile."""
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def rules(self) -> Sequence['outputs.ScaleRuleResponse']:
        """the collection of rules that provide the triggers and parameters for the scaling action. A maximum of 10 rules can be specified."""
        return pulumi.get(self, "rules")

    @property
    @pulumi.getter(name="fixedDate")
    def fixed_date(self) -> Optional['outputs.TimeWindowResponse']:
        """the specific date-time for the profile. This element is not used if the Recurrence element is used."""
        return pulumi.get(self, "fixed_date")

    @property
    @pulumi.getter
    def recurrence(self) -> Optional['outputs.RecurrenceResponse']:
        """the repeating times at which this profile begins. This element is not used if the FixedDate element is used."""
        return pulumi.get(self, "recurrence")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python name when known.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class AzNsActionGroupResponse(dict):
    """
    Azure action group
    """
    def __init__(self, *,
                 action_group: Optional[Sequence[str]] = None,
                 custom_webhook_payload: Optional[str] = None,
                 email_subject: Optional[str] = None):
        """
        Azure action group

        :param Sequence[str] action_group: Azure Action Group reference.
        :param str custom_webhook_payload: Custom payload to be sent for all webhook URI in Azure action group
        :param str email_subject: Custom subject override for all email ids in Azure action group
        """
        # All members are optional; store only those explicitly provided.
        optional = (
            ("action_group", action_group),
            ("custom_webhook_payload", custom_webhook_payload),
            ("email_subject", email_subject),
        )
        for key, value in optional:
            if value is not None:
                pulumi.set(self, key, value)

    @property
    @pulumi.getter(name="actionGroup")
    def action_group(self) -> Optional[Sequence[str]]:
        """Azure Action Group reference."""
        return pulumi.get(self, "action_group")

    @property
    @pulumi.getter(name="customWebhookPayload")
    def custom_webhook_payload(self) -> Optional[str]:
        """Custom payload to be sent for all webhook URI in Azure action group"""
        return pulumi.get(self, "custom_webhook_payload")

    @property
    @pulumi.getter(name="emailSubject")
    def email_subject(self) -> Optional[str]:
        """Custom subject override for all email ids in Azure action group"""
        return pulumi.get(self, "email_subject")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python name when known.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class AzureAppPushReceiverResponse(dict):
    """
    The Azure mobile App push notification receiver.
    """
    def __init__(self, *,
                 email_address: str,
                 name: str):
        """
        The Azure mobile App push notification receiver.

        :param str email_address: The email address registered for the Azure mobile app.
        :param str name: The name of the Azure mobile app push receiver. Names must be unique across all receivers within an action group.
        """
        # Both members are required; store them unconditionally.
        for key, value in (("email_address", email_address), ("name", name)):
            pulumi.set(self, key, value)

    @property
    @pulumi.getter(name="emailAddress")
    def email_address(self) -> str:
        """The email address registered for the Azure mobile app."""
        return pulumi.get(self, "email_address")

    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the Azure mobile app push receiver. Names must be unique across all receivers within an action group."""
        return pulumi.get(self, "name")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python name when known.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class AzureFunctionReceiverResponse(dict):
    """
    An azure function receiver.
    """
    def __init__(self, *,
                 function_app_resource_id: str,
                 function_name: str,
                 http_trigger_url: str,
                 name: str,
                 use_common_alert_schema: bool):
        """
        An azure function receiver.

        :param str function_app_resource_id: The azure resource id of the function app.
        :param str function_name: The function name in the function app.
        :param str http_trigger_url: The http trigger url where http request sent to.
        :param str name: The name of the azure function receiver. Names must be unique across all receivers within an action group.
        :param bool use_common_alert_schema: Indicates whether to use common alert schema.
        """
        # All members are required; store them unconditionally.
        required = (
            ("function_app_resource_id", function_app_resource_id),
            ("function_name", function_name),
            ("http_trigger_url", http_trigger_url),
            ("name", name),
            ("use_common_alert_schema", use_common_alert_schema),
        )
        for key, value in required:
            pulumi.set(self, key, value)

    @property
    @pulumi.getter(name="functionAppResourceId")
    def function_app_resource_id(self) -> str:
        """The azure resource id of the function app."""
        return pulumi.get(self, "function_app_resource_id")

    @property
    @pulumi.getter(name="functionName")
    def function_name(self) -> str:
        """The function name in the function app."""
        return pulumi.get(self, "function_name")

    @property
    @pulumi.getter(name="httpTriggerUrl")
    def http_trigger_url(self) -> str:
        """The http trigger url where http request sent to."""
        return pulumi.get(self, "http_trigger_url")

    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the azure function receiver. Names must be unique across all receivers within an action group."""
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="useCommonAlertSchema")
    def use_common_alert_schema(self) -> bool:
        """Indicates whether to use common alert schema."""
        return pulumi.get(self, "use_common_alert_schema")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python name when known.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class CriteriaResponse(dict):
    """
    Specifies the criteria for converting log to metric.
    """
    def __init__(self, *,
                 metric_name: str,
                 dimensions: Optional[Sequence['outputs.DimensionResponse']] = None):
        """
        Specifies the criteria for converting log to metric.

        :param str metric_name: Name of the metric
        :param Sequence['DimensionResponseArgs'] dimensions: List of Dimensions for creating metric
        """
        pulumi.set(self, "metric_name", metric_name)
        # The dimension list is optional and stored only when provided.
        if dimensions is not None:
            pulumi.set(self, "dimensions", dimensions)

    @property
    @pulumi.getter(name="metricName")
    def metric_name(self) -> str:
        """Name of the metric"""
        return pulumi.get(self, "metric_name")

    @property
    @pulumi.getter
    def dimensions(self) -> Optional[Sequence['outputs.DimensionResponse']]:
        """List of Dimensions for creating metric"""
        return pulumi.get(self, "dimensions")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python name when known.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class DataCollectionRuleResponseDataSources(dict):
    """
    The specification of data sources.
    This property is optional and can be omitted if the rule is meant to be used via direct calls to the provisioned endpoint.
    """
    def __init__(self, *,
                 extensions: Optional[Sequence['outputs.ExtensionDataSourceResponse']] = None,
                 performance_counters: Optional[Sequence['outputs.PerfCounterDataSourceResponse']] = None,
                 syslog: Optional[Sequence['outputs.SyslogDataSourceResponse']] = None,
                 windows_event_logs: Optional[Sequence['outputs.WindowsEventLogDataSourceResponse']] = None):
        """
        The specification of data sources.
        This property is optional and can be omitted if the rule is meant to be used via direct calls to the provisioned endpoint.

        :param Sequence['ExtensionDataSourceResponseArgs'] extensions: The list of Azure VM extension data source configurations.
        :param Sequence['PerfCounterDataSourceResponseArgs'] performance_counters: The list of performance counter data source configurations.
        :param Sequence['SyslogDataSourceResponseArgs'] syslog: The list of Syslog data source configurations.
        :param Sequence['WindowsEventLogDataSourceResponseArgs'] windows_event_logs: The list of Windows Event Log data source configurations.
        """
        # Every member is optional; store only those explicitly provided.
        optional = (
            ("extensions", extensions),
            ("performance_counters", performance_counters),
            ("syslog", syslog),
            ("windows_event_logs", windows_event_logs),
        )
        for key, value in optional:
            if value is not None:
                pulumi.set(self, key, value)

    @property
    @pulumi.getter
    def extensions(self) -> Optional[Sequence['outputs.ExtensionDataSourceResponse']]:
        """The list of Azure VM extension data source configurations."""
        return pulumi.get(self, "extensions")

    @property
    @pulumi.getter(name="performanceCounters")
    def performance_counters(self) -> Optional[Sequence['outputs.PerfCounterDataSourceResponse']]:
        """The list of performance counter data source configurations."""
        return pulumi.get(self, "performance_counters")

    @property
    @pulumi.getter
    def syslog(self) -> Optional[Sequence['outputs.SyslogDataSourceResponse']]:
        """The list of Syslog data source configurations."""
        return pulumi.get(self, "syslog")

    @property
    @pulumi.getter(name="windowsEventLogs")
    def windows_event_logs(self) -> Optional[Sequence['outputs.WindowsEventLogDataSourceResponse']]:
        """The list of Windows Event Log data source configurations."""
        return pulumi.get(self, "windows_event_logs")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python name when known.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class DataCollectionRuleResponseDestinations(dict):
    """
    The specification of destinations.
    """
    def __init__(self, *,
                 azure_monitor_metrics: Optional['outputs.DestinationsSpecResponseAzureMonitorMetrics'] = None,
                 log_analytics: Optional[Sequence['outputs.LogAnalyticsDestinationResponse']] = None):
        """
        The specification of destinations.

        :param 'DestinationsSpecResponseAzureMonitorMetricsArgs' azure_monitor_metrics: Azure Monitor Metrics destination.
        :param Sequence['LogAnalyticsDestinationResponseArgs'] log_analytics: List of Log Analytics destinations.
        """
        # Both members are optional; store only those explicitly provided.
        optional = (
            ("azure_monitor_metrics", azure_monitor_metrics),
            ("log_analytics", log_analytics),
        )
        for key, value in optional:
            if value is not None:
                pulumi.set(self, key, value)

    @property
    @pulumi.getter(name="azureMonitorMetrics")
    def azure_monitor_metrics(self) -> Optional['outputs.DestinationsSpecResponseAzureMonitorMetrics']:
        """Azure Monitor Metrics destination."""
        return pulumi.get(self, "azure_monitor_metrics")

    @property
    @pulumi.getter(name="logAnalytics")
    def log_analytics(self) -> Optional[Sequence['outputs.LogAnalyticsDestinationResponse']]:
        """List of Log Analytics destinations."""
        return pulumi.get(self, "log_analytics")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python name when known.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class DataFlowResponse(dict):
    """
    Definition of which streams are sent to which destinations.
    """
    def __init__(self, *,
                 destinations: Sequence[str],
                 streams: Sequence[str]):
        """
        Definition of which streams are sent to which destinations.

        :param Sequence[str] destinations: List of destinations for this data flow.
        :param Sequence[str] streams: List of streams for this data flow.
        """
        # Both members are required; store them unconditionally.
        for key, value in (("destinations", destinations), ("streams", streams)):
            pulumi.set(self, key, value)

    @property
    @pulumi.getter
    def destinations(self) -> Sequence[str]:
        """List of destinations for this data flow."""
        return pulumi.get(self, "destinations")

    @property
    @pulumi.getter
    def streams(self) -> Sequence[str]:
        """List of streams for this data flow."""
        return pulumi.get(self, "streams")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python name when known.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class DataSourceConfigurationResponse(dict):
    """
    Aggregates the per-kind collection settings for a data source:
    Windows event logs, performance counters, and ETW providers.
    All members are optional; only the ones supplied are stored.
    """
    def __init__(__self__, *,
                 event_logs: Optional[Sequence['outputs.EventLogConfigurationResponse']] = None,
                 perf_counters: Optional[Sequence['outputs.PerformanceCounterConfigurationResponse']] = None,
                 providers: Optional[Sequence['outputs.EtwProviderConfigurationResponse']] = None):
        """
        :param Sequence['EventLogConfigurationResponseArgs'] event_logs: Windows event logs configuration.
        :param Sequence['PerformanceCounterConfigurationResponseArgs'] perf_counters: Performance counter configuration
        :param Sequence['EtwProviderConfigurationResponseArgs'] providers: ETW providers configuration
        """
        # Only explicitly-provided members are stored on the output object.
        if event_logs is not None:
            pulumi.set(__self__, "event_logs", event_logs)
        if perf_counters is not None:
            pulumi.set(__self__, "perf_counters", perf_counters)
        if providers is not None:
            pulumi.set(__self__, "providers", providers)
    @property
    @pulumi.getter(name="eventLogs")
    def event_logs(self) -> Optional[Sequence['outputs.EventLogConfigurationResponse']]:
        """
        Windows event logs configuration.
        """
        return pulumi.get(self, "event_logs")
    @property
    @pulumi.getter(name="perfCounters")
    def perf_counters(self) -> Optional[Sequence['outputs.PerformanceCounterConfigurationResponse']]:
        """
        Performance counter configuration
        """
        return pulumi.get(self, "perf_counters")
    @property
    @pulumi.getter
    def providers(self) -> Optional[Sequence['outputs.EtwProviderConfigurationResponse']]:
        """
        ETW providers configuration
        """
        return pulumi.get(self, "providers")
    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python name when known;
        # fall through to the original name otherwise.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class DataSourceResponse(dict):
    """
    Data source object contains configuration to collect telemetry and one or more sinks to send that telemetry data to
    """
    def __init__(__self__, *,
                 configuration: 'outputs.DataSourceConfigurationResponse',
                 kind: str,
                 sinks: Sequence['outputs.SinkConfigurationResponse']):
        """
        Data source object contains configuration to collect telemetry and one or more sinks to send that telemetry data to
        :param 'DataSourceConfigurationResponseArgs' configuration: The telemetry-collection configuration for this data source.
        :param str kind: Datasource kind
        :param Sequence['SinkConfigurationResponseArgs'] sinks: The sinks that the collected telemetry is sent to.
        """
        pulumi.set(__self__, "configuration", configuration)
        pulumi.set(__self__, "kind", kind)
        pulumi.set(__self__, "sinks", sinks)
    @property
    @pulumi.getter
    def configuration(self) -> 'outputs.DataSourceConfigurationResponse':
        """
        The telemetry-collection configuration for this data source.
        """
        return pulumi.get(self, "configuration")
    @property
    @pulumi.getter
    def kind(self) -> str:
        """
        Datasource kind
        """
        return pulumi.get(self, "kind")
    @property
    @pulumi.getter
    def sinks(self) -> Sequence['outputs.SinkConfigurationResponse']:
        """
        The sinks that the collected telemetry is sent to.
        """
        return pulumi.get(self, "sinks")
    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python name when known;
        # fall through to the original name otherwise.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class DestinationsSpecResponseAzureMonitorMetrics(dict):
    """
    Azure Monitor Metrics destination.
    """
    def __init__(self, *,
                 name: str):
        """
        Azure Monitor Metrics destination.

        :param str name: A friendly name for the destination.
               This name should be unique across all destinations (regardless of type) within the data collection rule.
        """
        pulumi.set(self, "name", name)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        A friendly name for the destination.
        This name should be unique across all destinations (regardless of type) within the data collection rule.
        """
        return pulumi.get(self, "name")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python name when known.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class DimensionResponse(dict):
    """
    Specifies the criteria for converting log to metric.
    """
    def __init__(self, *,
                 name: str,
                 operator: str,
                 values: Sequence[str]):
        """
        Specifies the criteria for converting log to metric.

        :param str name: Name of the dimension
        :param str operator: Operator for dimension values
        :param Sequence[str] values: List of dimension values
        """
        # All members are required; store them unconditionally.
        for key, value in (("name", name), ("operator", operator), ("values", values)):
            pulumi.set(self, key, value)

    @property
    @pulumi.getter
    def name(self) -> str:
        """Name of the dimension"""
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def operator(self) -> str:
        """Operator for dimension values"""
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """List of dimension values"""
        return pulumi.get(self, "values")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python name when known.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class DynamicMetricCriteriaResponse(dict):
    """
    Criterion for dynamic threshold.
    """
    def __init__(self, *,
                 alert_sensitivity: str,
                 criterion_type: str,
                 failing_periods: 'outputs.DynamicThresholdFailingPeriodsResponse',
                 metric_name: str,
                 name: str,
                 operator: str,
                 time_aggregation: str,
                 dimensions: Optional[Sequence['outputs.MetricDimensionResponse']] = None,
                 ignore_data_before: Optional[str] = None,
                 metric_namespace: Optional[str] = None,
                 skip_metric_validation: Optional[bool] = None):
        """
        Criterion for dynamic threshold.

        :param str alert_sensitivity: The extent of deviation required to trigger an alert. This will affect how tight the threshold is to the metric series pattern.
        :param str criterion_type: Specifies the type of threshold criteria
               Expected value is 'DynamicThresholdCriterion'.
        :param 'DynamicThresholdFailingPeriodsResponseArgs' failing_periods: The minimum number of violations required within the selected lookback time window required to raise an alert.
        :param str metric_name: Name of the metric.
        :param str name: Name of the criteria.
        :param str operator: The operator used to compare the metric value against the threshold.
        :param str time_aggregation: the criteria time aggregation types.
        :param Sequence['MetricDimensionResponseArgs'] dimensions: List of dimension conditions.
        :param str ignore_data_before: Use this option to set the date from which to start learning the metric historical data and calculate the dynamic thresholds (in ISO8601 format)
        :param str metric_namespace: Namespace of the metric.
        :param bool skip_metric_validation: Allows creating an alert rule on a custom metric that isn't yet emitted, by causing the metric validation to be skipped.
        """
        pulumi.set(self, "alert_sensitivity", alert_sensitivity)
        # The discriminator is fixed for this criterion type; the incoming
        # `criterion_type` argument is intentionally not stored.
        pulumi.set(self, "criterion_type", 'DynamicThresholdCriterion')
        required = (
            ("failing_periods", failing_periods),
            ("metric_name", metric_name),
            ("name", name),
            ("operator", operator),
            ("time_aggregation", time_aggregation),
        )
        for key, value in required:
            pulumi.set(self, key, value)
        # Optional members are stored only when explicitly provided.
        optional = (
            ("dimensions", dimensions),
            ("ignore_data_before", ignore_data_before),
            ("metric_namespace", metric_namespace),
            ("skip_metric_validation", skip_metric_validation),
        )
        for key, value in optional:
            if value is not None:
                pulumi.set(self, key, value)

    @property
    @pulumi.getter(name="alertSensitivity")
    def alert_sensitivity(self) -> str:
        """The extent of deviation required to trigger an alert. This will affect how tight the threshold is to the metric series pattern."""
        return pulumi.get(self, "alert_sensitivity")

    @property
    @pulumi.getter(name="criterionType")
    def criterion_type(self) -> str:
        """
        Specifies the type of threshold criteria
        Expected value is 'DynamicThresholdCriterion'.
        """
        return pulumi.get(self, "criterion_type")

    @property
    @pulumi.getter(name="failingPeriods")
    def failing_periods(self) -> 'outputs.DynamicThresholdFailingPeriodsResponse':
        """The minimum number of violations required within the selected lookback time window required to raise an alert."""
        return pulumi.get(self, "failing_periods")

    @property
    @pulumi.getter(name="metricName")
    def metric_name(self) -> str:
        """Name of the metric."""
        return pulumi.get(self, "metric_name")

    @property
    @pulumi.getter
    def name(self) -> str:
        """Name of the criteria."""
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def operator(self) -> str:
        """The operator used to compare the metric value against the threshold."""
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter(name="timeAggregation")
    def time_aggregation(self) -> str:
        """the criteria time aggregation types."""
        return pulumi.get(self, "time_aggregation")

    @property
    @pulumi.getter
    def dimensions(self) -> Optional[Sequence['outputs.MetricDimensionResponse']]:
        """List of dimension conditions."""
        return pulumi.get(self, "dimensions")

    @property
    @pulumi.getter(name="ignoreDataBefore")
    def ignore_data_before(self) -> Optional[str]:
        """Use this option to set the date from which to start learning the metric historical data and calculate the dynamic thresholds (in ISO8601 format)"""
        return pulumi.get(self, "ignore_data_before")

    @property
    @pulumi.getter(name="metricNamespace")
    def metric_namespace(self) -> Optional[str]:
        """Namespace of the metric."""
        return pulumi.get(self, "metric_namespace")

    @property
    @pulumi.getter(name="skipMetricValidation")
    def skip_metric_validation(self) -> Optional[bool]:
        """Allows creating an alert rule on a custom metric that isn't yet emitted, by causing the metric validation to be skipped."""
        return pulumi.get(self, "skip_metric_validation")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python name when known.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class DynamicThresholdFailingPeriodsResponse(dict):
    """
    The minimum number of violations required within the selected lookback time window required to raise an alert.
    """
    def __init__(self, *,
                 min_failing_periods_to_alert: float,
                 number_of_evaluation_periods: float):
        """
        The minimum number of violations required within the selected lookback time window required to raise an alert.

        :param float min_failing_periods_to_alert: The number of violations to trigger an alert. Should be smaller or equal to numberOfEvaluationPeriods.
        :param float number_of_evaluation_periods: The number of aggregated lookback points. The lookback time window is calculated based on the aggregation granularity (windowSize) and the selected number of aggregated points.
        """
        # Both members are required; store them unconditionally.
        for key, value in (("min_failing_periods_to_alert", min_failing_periods_to_alert),
                           ("number_of_evaluation_periods", number_of_evaluation_periods)):
            pulumi.set(self, key, value)

    @property
    @pulumi.getter(name="minFailingPeriodsToAlert")
    def min_failing_periods_to_alert(self) -> float:
        """The number of violations to trigger an alert. Should be smaller or equal to numberOfEvaluationPeriods."""
        return pulumi.get(self, "min_failing_periods_to_alert")

    @property
    @pulumi.getter(name="numberOfEvaluationPeriods")
    def number_of_evaluation_periods(self) -> float:
        """The number of aggregated lookback points. The lookback time window is calculated based on the aggregation granularity (windowSize) and the selected number of aggregated points."""
        return pulumi.get(self, "number_of_evaluation_periods")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python name when known.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class EmailNotificationResponse(dict):
    """
    Email notification of an autoscale event.
    """
    def __init__(self, *,
                 custom_emails: Optional[Sequence[str]] = None,
                 send_to_subscription_administrator: Optional[bool] = None,
                 send_to_subscription_co_administrators: Optional[bool] = None):
        """
        Email notification of an autoscale event.

        :param Sequence[str] custom_emails: the custom e-mails list. This value can be null or empty, in which case this attribute will be ignored.
        :param bool send_to_subscription_administrator: a value indicating whether to send email to subscription administrator.
        :param bool send_to_subscription_co_administrators: a value indicating whether to send email to subscription co-administrators.
        """
        # All members are optional; store only those explicitly provided.
        optional = (
            ("custom_emails", custom_emails),
            ("send_to_subscription_administrator", send_to_subscription_administrator),
            ("send_to_subscription_co_administrators", send_to_subscription_co_administrators),
        )
        for key, value in optional:
            if value is not None:
                pulumi.set(self, key, value)

    @property
    @pulumi.getter(name="customEmails")
    def custom_emails(self) -> Optional[Sequence[str]]:
        """the custom e-mails list. This value can be null or empty, in which case this attribute will be ignored."""
        return pulumi.get(self, "custom_emails")

    @property
    @pulumi.getter(name="sendToSubscriptionAdministrator")
    def send_to_subscription_administrator(self) -> Optional[bool]:
        """a value indicating whether to send email to subscription administrator."""
        return pulumi.get(self, "send_to_subscription_administrator")

    @property
    @pulumi.getter(name="sendToSubscriptionCoAdministrators")
    def send_to_subscription_co_administrators(self) -> Optional[bool]:
        """a value indicating whether to send email to subscription co-administrators."""
        return pulumi.get(self, "send_to_subscription_co_administrators")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python name when known.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class EmailReceiverResponse(dict):
    """
    An email receiver.
    """
    def __init__(self, *,
                 email_address: str,
                 name: str,
                 status: str,
                 use_common_alert_schema: bool):
        """
        An email receiver.

        :param str email_address: The email address of this receiver.
        :param str name: The name of the email receiver. Names must be unique across all receivers within an action group.
        :param str status: The receiver status of the e-mail.
        :param bool use_common_alert_schema: Indicates whether to use common alert schema.
        """
        # All members are required; store them unconditionally.
        required = (
            ("email_address", email_address),
            ("name", name),
            ("status", status),
            ("use_common_alert_schema", use_common_alert_schema),
        )
        for key, value in required:
            pulumi.set(self, key, value)

    @property
    @pulumi.getter(name="emailAddress")
    def email_address(self) -> str:
        """The email address of this receiver."""
        return pulumi.get(self, "email_address")

    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the email receiver. Names must be unique across all receivers within an action group."""
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def status(self) -> str:
        """The receiver status of the e-mail."""
        return pulumi.get(self, "status")

    @property
    @pulumi.getter(name="useCommonAlertSchema")
    def use_common_alert_schema(self) -> bool:
        """Indicates whether to use common alert schema."""
        return pulumi.get(self, "use_common_alert_schema")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python name when known.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class EtwEventConfigurationResponse(dict):
    """
    Configuration for a single ETW event to collect.
    """
    def __init__(__self__, *,
                 id: int,
                 name: str,
                 filter: Optional[str] = None):
        """
        :param int id: The id of the ETW event.
        :param str name: The name of the ETW event.
        :param str filter: Optional filter expression for the event; stored only when provided.
        """
        pulumi.set(__self__, "id", id)
        pulumi.set(__self__, "name", name)
        if filter is not None:
            pulumi.set(__self__, "filter", filter)
    @property
    @pulumi.getter
    def id(self) -> int:
        # The id of the ETW event.
        return pulumi.get(self, "id")
    @property
    @pulumi.getter
    def name(self) -> str:
        # The name of the ETW event.
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def filter(self) -> Optional[str]:
        # Optional filter expression; None when it was not supplied.
        return pulumi.get(self, "filter")
    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python name when known;
        # fall through to the original name otherwise.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class EtwProviderConfigurationResponse(dict):
    """
    Configuration for an ETW provider: the provider id and the events to collect from it.
    """
    def __init__(__self__, *,
                 events: Sequence['outputs.EtwEventConfigurationResponse'],
                 id: str):
        """
        :param Sequence['EtwEventConfigurationResponseArgs'] events: The events to collect from this provider.
        :param str id: The id of the ETW provider.
        """
        pulumi.set(__self__, "events", events)
        pulumi.set(__self__, "id", id)
    @property
    @pulumi.getter
    def events(self) -> Sequence['outputs.EtwEventConfigurationResponse']:
        # The events collected from this provider.
        return pulumi.get(self, "events")
    @property
    @pulumi.getter
    def id(self) -> str:
        # The id of the ETW provider.
        return pulumi.get(self, "id")
    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python name when known;
        # fall through to the original name otherwise.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class EventLogConfigurationResponse(dict):
    """
    Configuration for collecting a Windows event log.
    """
    def __init__(__self__, *,
                 log_name: str,
                 filter: Optional[str] = None):
        """
        :param str log_name: The name of the event log to collect.
        :param str filter: Optional filter expression for the log; stored only when provided.
        """
        pulumi.set(__self__, "log_name", log_name)
        if filter is not None:
            pulumi.set(__self__, "filter", filter)
    @property
    @pulumi.getter(name="logName")
    def log_name(self) -> str:
        # The name of the event log.
        return pulumi.get(self, "log_name")
    @property
    @pulumi.getter
    def filter(self) -> Optional[str]:
        # Optional filter expression; None when it was not supplied.
        return pulumi.get(self, "filter")
    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python name when known;
        # fall through to the original name otherwise.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ExtensionDataSourceResponse(dict):
    """
    Definition of which data will be collected from a separate VM extension that integrates with the Azure Monitor Agent.
    Collected from either Windows and Linux machines, depending on which extension is defined.
    """
    def __init__(__self__, *,
                 extension_name: str,
                 name: str,
                 streams: Sequence[str],
                 extension_settings: Optional[Any] = None):
        """
        :param str extension_name: The name of the VM extension.
        :param str name: A friendly name for the data source.
               This name should be unique across all data sources (regardless of type) within the data collection rule.
        :param Sequence[str] streams: List of streams that this data source will be sent to.
               A stream indicates what schema will be used for this data and usually what table in Log Analytics the data will be sent to.
        :param Any extension_settings: The extension settings. The format is specific for particular extension.
        """
        for key, value in (("extension_name", extension_name),
                           ("name", name),
                           ("streams", streams)):
            pulumi.set(__self__, key, value)
        # Optional settings blob is only recorded when supplied.
        if extension_settings is not None:
            pulumi.set(__self__, "extension_settings", extension_settings)
    @property
    @pulumi.getter(name="extensionName")
    def extension_name(self) -> str:
        """The name of the VM extension."""
        return pulumi.get(self, "extension_name")
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        A friendly name for the data source, unique across all data sources
        (regardless of type) within the data collection rule.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def streams(self) -> Sequence[str]:
        """
        List of streams that this data source will be sent to. A stream indicates
        what schema will be used for this data and usually what table in Log
        Analytics the data will be sent to.
        """
        return pulumi.get(self, "streams")
    @property
    @pulumi.getter(name="extensionSettings")
    def extension_settings(self) -> Optional[Any]:
        """The extension settings. The format is specific for particular extension."""
        return pulumi.get(self, "extension_settings")
    def _translate_property(self, prop):
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class ItsmReceiverResponse(dict):
    """
    An Itsm receiver.
    """
    def __init__(__self__, *,
                 connection_id: str,
                 name: str,
                 region: str,
                 ticket_configuration: str,
                 workspace_id: str):
        """
        :param str connection_id: Unique identification of ITSM connection among multiple defined in above workspace.
        :param str name: The name of the Itsm receiver. Names must be unique across all receivers within an action group.
        :param str region: Region in which workspace resides. Supported values:'centralindia','japaneast','southeastasia','australiasoutheast','uksouth','westcentralus','canadacentral','eastus','westeurope'
        :param str ticket_configuration: JSON blob for the configurations of the ITSM action. CreateMultipleWorkItems option will be part of this blob as well.
        :param str workspace_id: OMS LA instance identifier.
        """
        for key, value in (("connection_id", connection_id),
                           ("name", name),
                           ("region", region),
                           ("ticket_configuration", ticket_configuration),
                           ("workspace_id", workspace_id)):
            pulumi.set(__self__, key, value)
    @property
    @pulumi.getter(name="connectionId")
    def connection_id(self) -> str:
        """Unique identification of ITSM connection among multiple defined in above workspace."""
        return pulumi.get(self, "connection_id")
    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the Itsm receiver. Names must be unique across all receivers within an action group."""
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def region(self) -> str:
        """
        Region in which workspace resides. Supported values:'centralindia','japaneast','southeastasia','australiasoutheast','uksouth','westcentralus','canadacentral','eastus','westeurope'
        """
        return pulumi.get(self, "region")
    @property
    @pulumi.getter(name="ticketConfiguration")
    def ticket_configuration(self) -> str:
        """
        JSON blob for the configurations of the ITSM action. CreateMultipleWorkItems option will be part of this blob as well.
        """
        return pulumi.get(self, "ticket_configuration")
    @property
    @pulumi.getter(name="workspaceId")
    def workspace_id(self) -> str:
        """OMS LA instance identifier."""
        return pulumi.get(self, "workspace_id")
    def _translate_property(self, prop):
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class LocationThresholdRuleConditionResponse(dict):
    """
    A rule condition based on a certain number of locations failing.
    """
    def __init__(__self__, *,
                 failed_location_count: int,
                 odata_type: str,
                 data_source: Optional[Any] = None,
                 window_size: Optional[str] = None):
        """
        :param int failed_location_count: the number of locations that must fail to activate the alert.
        :param str odata_type: specifies the type of condition; always set to
               'Microsoft.Azure.Management.Insights.Models.LocationThresholdRuleCondition' for this class.
        :param Union['RuleManagementEventDataSourceResponseArgs', 'RuleMetricDataSourceResponseArgs'] data_source: the resource from which the rule collects its data. For this type dataSource will always be of type RuleMetricDataSource.
        :param str window_size: the period of time (in ISO 8601 duration format) that is used to monitor alert activity based on the threshold. If specified then it must be between 5 minutes and 1 day.
        """
        pulumi.set(__self__, "failed_location_count", failed_location_count)
        # Discriminator: the incoming odata_type argument is intentionally ignored
        # and the fixed type tag is stored instead.
        pulumi.set(__self__, "odata_type", 'Microsoft.Azure.Management.Insights.Models.LocationThresholdRuleCondition')
        for key, value in (("data_source", data_source), ("window_size", window_size)):
            if value is not None:
                pulumi.set(__self__, key, value)
    @property
    @pulumi.getter(name="failedLocationCount")
    def failed_location_count(self) -> int:
        """the number of locations that must fail to activate the alert."""
        return pulumi.get(self, "failed_location_count")
    @property
    @pulumi.getter(name="odataType")
    def odata_type(self) -> str:
        """
        specifies the type of condition. This can be one of three types: ManagementEventRuleCondition (occurrences of management events), LocationThresholdRuleCondition (based on the number of failures of a web test), and ThresholdRuleCondition (based on the threshold of a metric).
        Expected value is 'Microsoft.Azure.Management.Insights.Models.LocationThresholdRuleCondition'.
        """
        return pulumi.get(self, "odata_type")
    @property
    @pulumi.getter(name="dataSource")
    def data_source(self) -> Optional[Any]:
        """
        the resource from which the rule collects its data. For this type dataSource will always be of type RuleMetricDataSource.
        """
        return pulumi.get(self, "data_source")
    @property
    @pulumi.getter(name="windowSize")
    def window_size(self) -> Optional[str]:
        """
        the period of time (in ISO 8601 duration format) that is used to monitor alert activity based on the threshold. If specified then it must be between 5 minutes and 1 day.
        """
        return pulumi.get(self, "window_size")
    def _translate_property(self, prop):
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class LogAnalyticsDestinationResponse(dict):
    """
    Log Analytics destination.
    """
    def __init__(__self__, *,
                 name: str,
                 workspace_resource_id: str):
        """
        :param str name: A friendly name for the destination.
               This name should be unique across all destinations (regardless of type) within the data collection rule.
        :param str workspace_resource_id: The resource ID of the Log Analytics workspace.
        """
        for key, value in (("name", name),
                           ("workspace_resource_id", workspace_resource_id)):
            pulumi.set(__self__, key, value)
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        A friendly name for the destination, unique across all destinations
        (regardless of type) within the data collection rule.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="workspaceResourceId")
    def workspace_resource_id(self) -> str:
        """The resource ID of the Log Analytics workspace."""
        return pulumi.get(self, "workspace_resource_id")
    def _translate_property(self, prop):
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class LogMetricTriggerResponse(dict):
    """
    A log metrics trigger descriptor.
    """
    def __init__(__self__, *,
                 metric_column: Optional[str] = None,
                 metric_trigger_type: Optional[str] = None,
                 threshold: Optional[float] = None,
                 threshold_operator: Optional[str] = None):
        """
        :param str metric_column: Evaluation of metric on a particular column
        :param str metric_trigger_type: Metric Trigger Type - 'Consecutive' or 'Total'
        :param float threshold: The threshold of the metric trigger.
        :param str threshold_operator: Evaluation operation for Metric -'GreaterThan' or 'LessThan' or 'Equal'.
        """
        # All fields are optional; only set those explicitly provided.
        for key, value in (("metric_column", metric_column),
                           ("metric_trigger_type", metric_trigger_type),
                           ("threshold", threshold),
                           ("threshold_operator", threshold_operator)):
            if value is not None:
                pulumi.set(__self__, key, value)
    @property
    @pulumi.getter(name="metricColumn")
    def metric_column(self) -> Optional[str]:
        """Evaluation of metric on a particular column"""
        return pulumi.get(self, "metric_column")
    @property
    @pulumi.getter(name="metricTriggerType")
    def metric_trigger_type(self) -> Optional[str]:
        """Metric Trigger Type - 'Consecutive' or 'Total'"""
        return pulumi.get(self, "metric_trigger_type")
    @property
    @pulumi.getter
    def threshold(self) -> Optional[float]:
        """The threshold of the metric trigger."""
        return pulumi.get(self, "threshold")
    @property
    @pulumi.getter(name="thresholdOperator")
    def threshold_operator(self) -> Optional[str]:
        """Evaluation operation for Metric -'GreaterThan' or 'LessThan' or 'Equal'."""
        return pulumi.get(self, "threshold_operator")
    def _translate_property(self, prop):
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class LogSettingsResponse(dict):
    """
    Part of MultiTenantDiagnosticSettings. Specifies the settings for a particular log.
    """
    def __init__(__self__, *,
                 enabled: bool,
                 category: Optional[str] = None,
                 retention_policy: Optional['outputs.RetentionPolicyResponse'] = None):
        """
        :param bool enabled: a value indicating whether this log is enabled.
        :param str category: Name of a Diagnostic Log category for a resource type this setting is applied to. To obtain the list of Diagnostic Log categories for a resource, first perform a GET diagnostic settings operation.
        :param 'RetentionPolicyResponseArgs' retention_policy: the retention policy for this log.
        """
        pulumi.set(__self__, "enabled", enabled)
        for key, value in (("category", category),
                           ("retention_policy", retention_policy)):
            if value is not None:
                pulumi.set(__self__, key, value)
    @property
    @pulumi.getter
    def enabled(self) -> bool:
        """a value indicating whether this log is enabled."""
        return pulumi.get(self, "enabled")
    @property
    @pulumi.getter
    def category(self) -> Optional[str]:
        """
        Name of a Diagnostic Log category for a resource type this setting is applied to. To obtain the list of Diagnostic Log categories for a resource, first perform a GET diagnostic settings operation.
        """
        return pulumi.get(self, "category")
    @property
    @pulumi.getter(name="retentionPolicy")
    def retention_policy(self) -> Optional['outputs.RetentionPolicyResponse']:
        """the retention policy for this log."""
        return pulumi.get(self, "retention_policy")
    def _translate_property(self, prop):
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class LogToMetricActionResponse(dict):
    """
    Specify action need to be taken when rule type is converting log to metric
    """
    def __init__(__self__, *,
                 criteria: Sequence['outputs.CriteriaResponse'],
                 odata_type: str):
        """
        :param Sequence['CriteriaResponseArgs'] criteria: Criteria of Metric
        :param str odata_type: Specifies the action; always set to the fixed
               LogToMetricAction discriminator for this class.
        """
        pulumi.set(__self__, "criteria", criteria)
        # Discriminator: the incoming odata_type argument is intentionally ignored
        # and the fixed type tag is stored instead.
        pulumi.set(__self__, "odata_type", 'Microsoft.WindowsAzure.Management.Monitoring.Alerts.Models.Microsoft.AppInsights.Nexus.DataContracts.Resources.ScheduledQueryRules.LogToMetricAction')
    @property
    @pulumi.getter
    def criteria(self) -> Sequence['outputs.CriteriaResponse']:
        """Criteria of Metric"""
        return pulumi.get(self, "criteria")
    @property
    @pulumi.getter(name="odataType")
    def odata_type(self) -> str:
        """
        Specifies the action. Supported values - AlertingAction, LogToMetricAction
        Expected value is 'Microsoft.WindowsAzure.Management.Monitoring.Alerts.Models.Microsoft.AppInsights.Nexus.DataContracts.Resources.ScheduledQueryRules.LogToMetricAction'.
        """
        return pulumi.get(self, "odata_type")
    def _translate_property(self, prop):
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class LogicAppReceiverResponse(dict):
    """
    A logic app receiver.
    """
    def __init__(__self__, *,
                 callback_url: str,
                 name: str,
                 resource_id: str,
                 use_common_alert_schema: bool):
        """
        :param str callback_url: The callback url where http request sent to.
        :param str name: The name of the logic app receiver. Names must be unique across all receivers within an action group.
        :param str resource_id: The azure resource id of the logic app receiver.
        :param bool use_common_alert_schema: Indicates whether to use common alert schema.
        """
        for key, value in (("callback_url", callback_url),
                           ("name", name),
                           ("resource_id", resource_id),
                           ("use_common_alert_schema", use_common_alert_schema)):
            pulumi.set(__self__, key, value)
    @property
    @pulumi.getter(name="callbackUrl")
    def callback_url(self) -> str:
        """The callback url where http request sent to."""
        return pulumi.get(self, "callback_url")
    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the logic app receiver. Names must be unique across all receivers within an action group."""
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="resourceId")
    def resource_id(self) -> str:
        """The azure resource id of the logic app receiver."""
        return pulumi.get(self, "resource_id")
    @property
    @pulumi.getter(name="useCommonAlertSchema")
    def use_common_alert_schema(self) -> bool:
        """Indicates whether to use common alert schema."""
        return pulumi.get(self, "use_common_alert_schema")
    def _translate_property(self, prop):
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class ManagedIdentityResponse(dict):
    """
    Customer Managed Identity
    """
    def __init__(__self__, *,
                 type: Optional[str] = None,
                 user_assigned_identities: Optional['outputs.UserAssignedIdentitiesResponse'] = None):
        """
        :param str type: The identity type.
        :param 'UserAssignedIdentitiesResponseArgs' user_assigned_identities: Customer Managed Identity
        """
        # Both fields are optional; only set those explicitly provided.
        for key, value in (("type", type),
                           ("user_assigned_identities", user_assigned_identities)):
            if value is not None:
                pulumi.set(__self__, key, value)
    @property
    @pulumi.getter
    def type(self) -> Optional[str]:
        """The identity type."""
        return pulumi.get(self, "type")
    @property
    @pulumi.getter(name="userAssignedIdentities")
    def user_assigned_identities(self) -> Optional['outputs.UserAssignedIdentitiesResponse']:
        """Customer Managed Identity"""
        return pulumi.get(self, "user_assigned_identities")
    def _translate_property(self, prop):
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class ManagementEventAggregationConditionResponse(dict):
    """
    How the data that is collected should be combined over time.
    """
    def __init__(__self__, *,
                 operator: Optional[str] = None,
                 threshold: Optional[float] = None,
                 window_size: Optional[str] = None):
        """
        :param str operator: the condition operator.
        :param float threshold: The threshold value that activates the alert.
        :param str window_size: the period of time (in ISO 8601 duration format) that is used to monitor alert activity based on the threshold. If specified then it must be between 5 minutes and 1 day.
        """
        # All fields are optional; only set those explicitly provided.
        for key, value in (("operator", operator),
                           ("threshold", threshold),
                           ("window_size", window_size)):
            if value is not None:
                pulumi.set(__self__, key, value)
    @property
    @pulumi.getter
    def operator(self) -> Optional[str]:
        """the condition operator."""
        return pulumi.get(self, "operator")
    @property
    @pulumi.getter
    def threshold(self) -> Optional[float]:
        """The threshold value that activates the alert."""
        return pulumi.get(self, "threshold")
    @property
    @pulumi.getter(name="windowSize")
    def window_size(self) -> Optional[str]:
        """
        the period of time (in ISO 8601 duration format) that is used to monitor alert activity based on the threshold. If specified then it must be between 5 minutes and 1 day.
        """
        return pulumi.get(self, "window_size")
    def _translate_property(self, prop):
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class ManagementEventRuleConditionResponse(dict):
    """
    A management event rule condition.
    """
    def __init__(__self__, *,
                 odata_type: str,
                 aggregation: Optional['outputs.ManagementEventAggregationConditionResponse'] = None,
                 data_source: Optional[Any] = None):
        """
        :param str odata_type: specifies the type of condition; always set to
               'Microsoft.Azure.Management.Insights.Models.ManagementEventRuleCondition' for this class.
        :param 'ManagementEventAggregationConditionResponseArgs' aggregation: How the data that is collected should be combined over time and when the alert is activated. Note that for management event alerts aggregation is optional – if it is not provided then any event will cause the alert to activate.
        :param Union['RuleManagementEventDataSourceResponseArgs', 'RuleMetricDataSourceResponseArgs'] data_source: the resource from which the rule collects its data. For this type dataSource will always be of type RuleMetricDataSource.
        """
        # Discriminator: the incoming odata_type argument is intentionally ignored
        # and the fixed type tag is stored instead.
        pulumi.set(__self__, "odata_type", 'Microsoft.Azure.Management.Insights.Models.ManagementEventRuleCondition')
        for key, value in (("aggregation", aggregation), ("data_source", data_source)):
            if value is not None:
                pulumi.set(__self__, key, value)
    @property
    @pulumi.getter(name="odataType")
    def odata_type(self) -> str:
        """
        specifies the type of condition. This can be one of three types: ManagementEventRuleCondition (occurrences of management events), LocationThresholdRuleCondition (based on the number of failures of a web test), and ThresholdRuleCondition (based on the threshold of a metric).
        Expected value is 'Microsoft.Azure.Management.Insights.Models.ManagementEventRuleCondition'.
        """
        return pulumi.get(self, "odata_type")
    @property
    @pulumi.getter
    def aggregation(self) -> Optional['outputs.ManagementEventAggregationConditionResponse']:
        """
        How the data that is collected should be combined over time and when the alert is activated. Note that for management event alerts aggregation is optional – if it is not provided then any event will cause the alert to activate.
        """
        return pulumi.get(self, "aggregation")
    @property
    @pulumi.getter(name="dataSource")
    def data_source(self) -> Optional[Any]:
        """
        the resource from which the rule collects its data. For this type dataSource will always be of type RuleMetricDataSource.
        """
        return pulumi.get(self, "data_source")
    def _translate_property(self, prop):
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class MetricAlertActionResponse(dict):
    """
    An alert action.
    """
    def __init__(__self__, *,
                 action_group_id: Optional[str] = None,
                 web_hook_properties: Optional[Mapping[str, str]] = None):
        """
        :param str action_group_id: the id of the action group to use.
        :param Mapping[str, str] web_hook_properties: This field allows specifying custom properties, which would be appended to the alert payload sent as input to the webhook.
        """
        # Both fields are optional; only set those explicitly provided.
        for key, value in (("action_group_id", action_group_id),
                           ("web_hook_properties", web_hook_properties)):
            if value is not None:
                pulumi.set(__self__, key, value)
    @property
    @pulumi.getter(name="actionGroupId")
    def action_group_id(self) -> Optional[str]:
        """the id of the action group to use."""
        return pulumi.get(self, "action_group_id")
    @property
    @pulumi.getter(name="webHookProperties")
    def web_hook_properties(self) -> Optional[Mapping[str, str]]:
        """
        This field allows specifying custom properties, which would be appended to the alert payload sent as input to the webhook.
        """
        return pulumi.get(self, "web_hook_properties")
    def _translate_property(self, prop):
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class MetricAlertMultipleResourceMultipleMetricCriteriaResponse(dict):
    """
    Specifies the metric alert criteria for multiple resource that has multiple metric criteria.
    """
    def __init__(__self__, *,
                 odata_type: str,
                 all_of: Optional[Sequence[Any]] = None):
        """
        :param str odata_type: specifies the type of the alert criteria; always set to
               'Microsoft.Azure.Monitor.MultipleResourceMultipleMetricCriteria' for this class.
        :param Sequence[Union['DynamicMetricCriteriaResponseArgs', 'MetricCriteriaResponseArgs']] all_of: the list of multiple metric criteria for this 'all of' operation.
        """
        # Discriminator: the incoming odata_type argument is intentionally ignored
        # and the fixed type tag is stored instead.
        pulumi.set(__self__, "odata_type", 'Microsoft.Azure.Monitor.MultipleResourceMultipleMetricCriteria')
        if all_of is not None:
            pulumi.set(__self__, "all_of", all_of)
    @property
    @pulumi.getter(name="odataType")
    def odata_type(self) -> str:
        """
        specifies the type of the alert criteria.
        Expected value is 'Microsoft.Azure.Monitor.MultipleResourceMultipleMetricCriteria'.
        """
        return pulumi.get(self, "odata_type")
    @property
    @pulumi.getter(name="allOf")
    def all_of(self) -> Optional[Sequence[Any]]:
        """the list of multiple metric criteria for this 'all of' operation."""
        return pulumi.get(self, "all_of")
    def _translate_property(self, prop):
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class MetricAlertSingleResourceMultipleMetricCriteriaResponse(dict):
    """
    Specifies the metric alert criteria for a single resource that has multiple metric criteria.
    """
    def __init__(__self__, *,
                 odata_type: str,
                 all_of: Optional[Sequence['outputs.MetricCriteriaResponse']] = None):
        """
        :param str odata_type: specifies the type of the alert criteria; always set to
               'Microsoft.Azure.Monitor.SingleResourceMultipleMetricCriteria' for this class.
        :param Sequence['MetricCriteriaResponseArgs'] all_of: The list of metric criteria for this 'all of' operation.
        """
        # Discriminator: the incoming odata_type argument is intentionally ignored
        # and the fixed type tag is stored instead.
        pulumi.set(__self__, "odata_type", 'Microsoft.Azure.Monitor.SingleResourceMultipleMetricCriteria')
        if all_of is not None:
            pulumi.set(__self__, "all_of", all_of)
    @property
    @pulumi.getter(name="odataType")
    def odata_type(self) -> str:
        """
        specifies the type of the alert criteria.
        Expected value is 'Microsoft.Azure.Monitor.SingleResourceMultipleMetricCriteria'.
        """
        return pulumi.get(self, "odata_type")
    @property
    @pulumi.getter(name="allOf")
    def all_of(self) -> Optional[Sequence['outputs.MetricCriteriaResponse']]:
        """The list of metric criteria for this 'all of' operation."""
        return pulumi.get(self, "all_of")
    def _translate_property(self, prop):
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class MetricCriteriaResponse(dict):
    """
    Criterion to filter metrics.
    """
    def __init__(__self__, *,
                 criterion_type: str,
                 metric_name: str,
                 name: str,
                 operator: str,
                 threshold: float,
                 time_aggregation: str,
                 dimensions: Optional[Sequence['outputs.MetricDimensionResponse']] = None,
                 metric_namespace: Optional[str] = None,
                 skip_metric_validation: Optional[bool] = None):
        """
        :param str criterion_type: Specifies the type of threshold criteria; always
               set to 'StaticThresholdCriterion' for this class.
        :param str metric_name: Name of the metric.
        :param str name: Name of the criteria.
        :param str operator: the criteria operator.
        :param float threshold: the criteria threshold value that activates the alert.
        :param str time_aggregation: the criteria time aggregation types.
        :param Sequence['MetricDimensionResponseArgs'] dimensions: List of dimension conditions.
        :param str metric_namespace: Namespace of the metric.
        :param bool skip_metric_validation: Allows creating an alert rule on a custom metric that isn't yet emitted, by causing the metric validation to be skipped.
        """
        # Discriminator: the incoming criterion_type argument is intentionally
        # ignored and the fixed type tag is stored instead.
        pulumi.set(__self__, "criterion_type", 'StaticThresholdCriterion')
        for key, value in (("metric_name", metric_name),
                           ("name", name),
                           ("operator", operator),
                           ("threshold", threshold),
                           ("time_aggregation", time_aggregation)):
            pulumi.set(__self__, key, value)
        for key, value in (("dimensions", dimensions),
                           ("metric_namespace", metric_namespace),
                           ("skip_metric_validation", skip_metric_validation)):
            if value is not None:
                pulumi.set(__self__, key, value)
    @property
    @pulumi.getter(name="criterionType")
    def criterion_type(self) -> str:
        """
        Specifies the type of threshold criteria
        Expected value is 'StaticThresholdCriterion'.
        """
        return pulumi.get(self, "criterion_type")
    @property
    @pulumi.getter(name="metricName")
    def metric_name(self) -> str:
        """Name of the metric."""
        return pulumi.get(self, "metric_name")
    @property
    @pulumi.getter
    def name(self) -> str:
        """Name of the criteria."""
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def operator(self) -> str:
        """the criteria operator."""
        return pulumi.get(self, "operator")
    @property
    @pulumi.getter
    def threshold(self) -> float:
        """the criteria threshold value that activates the alert."""
        return pulumi.get(self, "threshold")
    @property
    @pulumi.getter(name="timeAggregation")
    def time_aggregation(self) -> str:
        """the criteria time aggregation types."""
        return pulumi.get(self, "time_aggregation")
    @property
    @pulumi.getter
    def dimensions(self) -> Optional[Sequence['outputs.MetricDimensionResponse']]:
        """List of dimension conditions."""
        return pulumi.get(self, "dimensions")
    @property
    @pulumi.getter(name="metricNamespace")
    def metric_namespace(self) -> Optional[str]:
        """Namespace of the metric."""
        return pulumi.get(self, "metric_namespace")
    @property
    @pulumi.getter(name="skipMetricValidation")
    def skip_metric_validation(self) -> Optional[bool]:
        """
        Allows creating an alert rule on a custom metric that isn't yet emitted, by causing the metric validation to be skipped.
        """
        return pulumi.get(self, "skip_metric_validation")
    def _translate_property(self, prop):
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class MetricDimensionResponse(dict):
    """
    Specifies a metric dimension.
    """
    def __init__(__self__, *,
                 name: str,
                 operator: str,
                 values: Sequence[str]):
        """
        :param str name: Name of the dimension.
        :param str operator: the dimension operator. Only 'Include' and 'Exclude' are supported
        :param Sequence[str] values: list of dimension values.
        """
        for key, value in (("name", name),
                           ("operator", operator),
                           ("values", values)):
            pulumi.set(__self__, key, value)
    @property
    @pulumi.getter
    def name(self) -> str:
        """Name of the dimension."""
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def operator(self) -> str:
        """the dimension operator. Only 'Include' and 'Exclude' are supported"""
        return pulumi.get(self, "operator")
    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """list of dimension values."""
        return pulumi.get(self, "values")
    def _translate_property(self, prop):
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class MetricSettingsResponse(dict):
    """
    Part of MultiTenantDiagnosticSettings. Specifies the settings for a particular metric.
    """
    def __init__(__self__, *,
                 enabled: bool,
                 category: Optional[str] = None,
                 retention_policy: Optional['outputs.RetentionPolicyResponse'] = None,
                 time_grain: Optional[str] = None):
        """
        :param bool enabled: a value indicating whether this category is enabled.
        :param str category: Name of a Diagnostic Metric category for a resource type this setting is applied to. To obtain the list of Diagnostic metric categories for a resource, first perform a GET diagnostic settings operation.
        :param 'RetentionPolicyResponseArgs' retention_policy: the retention policy for this category.
        :param str time_grain: the timegrain of the metric in ISO8601 format.
        """
        pulumi.set(__self__, "enabled", enabled)
        for key, value in (("category", category),
                           ("retention_policy", retention_policy),
                           ("time_grain", time_grain)):
            if value is not None:
                pulumi.set(__self__, key, value)
    @property
    @pulumi.getter
    def enabled(self) -> bool:
        """a value indicating whether this category is enabled."""
        return pulumi.get(self, "enabled")
    @property
    @pulumi.getter
    def category(self) -> Optional[str]:
        """
        Name of a Diagnostic Metric category for a resource type this setting is applied to. To obtain the list of Diagnostic metric categories for a resource, first perform a GET diagnostic settings operation.
        """
        return pulumi.get(self, "category")
    @property
    @pulumi.getter(name="retentionPolicy")
    def retention_policy(self) -> Optional['outputs.RetentionPolicyResponse']:
        """the retention policy for this category."""
        return pulumi.get(self, "retention_policy")
    @property
    @pulumi.getter(name="timeGrain")
    def time_grain(self) -> Optional[str]:
        """the timegrain of the metric in ISO8601 format."""
        return pulumi.get(self, "time_grain")
    def _translate_property(self, prop):
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class MetricTriggerResponse(dict):
"""
The trigger that results in a scaling action.
"""
def __init__(__self__, *,
metric_name: str,
metric_resource_uri: str,
operator: str,
statistic: str,
threshold: float,
time_aggregation: str,
time_grain: str,
time_window: str,
dimensions: Optional[Sequence['outputs.ScaleRuleMetricDimensionResponse']] = None,
metric_namespace: Optional[str] = None):
"""
The trigger that results in a scaling action.
:param str metric_name: the name of the metric that defines what the rule monitors.
:param str metric_resource_uri: the resource identifier of the resource the rule monitors.
:param str operator: the operator that is used to compare the metric data and the threshold.
:param str statistic: the metric statistic type. How the metrics from multiple instances are combined.
:param float threshold: the threshold of the metric that triggers the scale action.
:param str time_aggregation: time aggregation type. How the data that is collected should be combined over time. The default value is Average.
:param str time_grain: the granularity of metrics the rule monitors. Must be one of the predefined values returned from metric definitions for the metric. Must be between 12 hours and 1 minute.
:param str time_window: the range of time in which instance data is collected. This value must be greater than the delay in metric collection, which can vary from resource-to-resource. Must be between 12 hours and 5 minutes.
:param Sequence['ScaleRuleMetricDimensionResponseArgs'] dimensions: List of dimension conditions. For example: [{"DimensionName":"AppName","Operator":"Equals","Values":["App1"]},{"DimensionName":"Deployment","Operator":"Equals","Values":["default"]}].
:param str metric_namespace: the namespace of the metric that defines what the rule monitors.
"""
pulumi.set(__self__, "metric_name", metric_name)
pulumi.set(__self__, "metric_resource_uri", metric_resource_uri)
pulumi.set(__self__, "operator", operator)
pulumi.set(__self__, "statistic", statistic)
pulumi.set(__self__, "threshold", threshold)
pulumi.set(__self__, "time_aggregation", time_aggregation)
pulumi.set(__self__, "time_grain", time_grain)
pulumi.set(__self__, "time_window", time_window)
if dimensions is not None:
pulumi.set(__self__, "dimensions", dimensions)
if metric_namespace is not None:
pulumi.set(__self__, "metric_namespace", metric_namespace)
@property
@pulumi.getter(name="metricName")
def metric_name(self) -> str:
"""
the name of the metric that defines what the rule monitors.
"""
return pulumi.get(self, "metric_name")
@property
@pulumi.getter(name="metricResourceUri")
def metric_resource_uri(self) -> str:
"""
the resource identifier of the resource the rule monitors.
"""
return pulumi.get(self, "metric_resource_uri")
@property
@pulumi.getter
def operator(self) -> str:
"""
the operator that is used to compare the metric data and the threshold.
"""
return pulumi.get(self, "operator")
@property
@pulumi.getter
def statistic(self) -> str:
"""
the metric statistic type. How the metrics from multiple instances are combined.
"""
return pulumi.get(self, "statistic")
@property
@pulumi.getter
def threshold(self) -> float:
"""
the threshold of the metric that triggers the scale action.
"""
return pulumi.get(self, "threshold")
@property
@pulumi.getter(name="timeAggregation")
def time_aggregation(self) -> str:
"""
time aggregation type. How the data that is collected should be combined over time. The default value is Average.
"""
return pulumi.get(self, "time_aggregation")
@property
@pulumi.getter(name="timeGrain")
def time_grain(self) -> str:
"""
the granularity of metrics the rule monitors. Must be one of the predefined values returned from metric definitions for the metric. Must be between 12 hours and 1 minute.
"""
return pulumi.get(self, "time_grain")
@property
@pulumi.getter(name="timeWindow")
def time_window(self) -> str:
"""
the range of time in which instance data is collected. This value must be greater than the delay in metric collection, which can vary from resource-to-resource. Must be between 12 hours and 5 minutes.
"""
return pulumi.get(self, "time_window")
@property
@pulumi.getter
def dimensions(self) -> Optional[Sequence['outputs.ScaleRuleMetricDimensionResponse']]:
"""
List of dimension conditions. For example: [{"DimensionName":"AppName","Operator":"Equals","Values":["App1"]},{"DimensionName":"Deployment","Operator":"Equals","Values":["default"]}].
"""
return pulumi.get(self, "dimensions")
@property
@pulumi.getter(name="metricNamespace")
def metric_namespace(self) -> Optional[str]:
"""
the namespace of the metric that defines what the rule monitors.
"""
return pulumi.get(self, "metric_namespace")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PerfCounterDataSourceResponse(dict):
    """
    Describes which performance counters this data collection rule gathers and
    how they are collected. Counters are read from both Windows and Linux
    machines on which they are present.
    """
    def __init__(__self__, *,
                 counter_specifiers: Sequence[str],
                 name: str,
                 sampling_frequency_in_seconds: int,
                 scheduled_transfer_period: str,
                 streams: Sequence[str]):
        """
        :param Sequence[str] counter_specifiers: Specifier names of the performance counters to collect.
               A wildcard (*) collects a counter for all instances; on Windows, the
               'typeperf' command lists the available counters.
        :param str name: Friendly name for the data source; must be unique across all
               data sources (regardless of type) within the data collection rule.
        :param int sampling_frequency_in_seconds: Seconds between consecutive counter measurements (samples).
        :param str scheduled_transfer_period: Interval between data uploads (scheduled
               transfers), rounded up to the nearest minute.
        :param Sequence[str] streams: Streams this data source is sent to; a stream
               indicates the schema used and, usually, the destination Log Analytics table.
        """
        # All fields are required; assign them in one pass.
        for attr, value in (
                ("counter_specifiers", counter_specifiers),
                ("name", name),
                ("sampling_frequency_in_seconds", sampling_frequency_in_seconds),
                ("scheduled_transfer_period", scheduled_transfer_period),
                ("streams", streams)):
            pulumi.set(__self__, attr, value)
    @property
    @pulumi.getter(name="counterSpecifiers")
    def counter_specifiers(self) -> Sequence[str]:
        """Specifier names of the performance counters to collect (wildcard * collects all instances)."""
        return pulumi.get(self, "counter_specifiers")
    @property
    @pulumi.getter
    def name(self) -> str:
        """Friendly name, unique across all data sources in the data collection rule."""
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="samplingFrequencyInSeconds")
    def sampling_frequency_in_seconds(self) -> int:
        """Seconds between consecutive counter measurements (samples)."""
        return pulumi.get(self, "sampling_frequency_in_seconds")
    @property
    @pulumi.getter(name="scheduledTransferPeriod")
    def scheduled_transfer_period(self) -> str:
        """Interval between data uploads (scheduled transfers), rounded up to the nearest minute."""
        return pulumi.get(self, "scheduled_transfer_period")
    @property
    @pulumi.getter
    def streams(self) -> Sequence[str]:
        """Streams this data source is sent to; determines schema and, usually, the Log Analytics destination table."""
        return pulumi.get(self, "streams")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PerformanceCounterConfigurationResponse(dict):
    """
    A performance counter configuration entry: counter name, sampling period,
    and an optional instance selector.
    """
    def __init__(__self__, *,
                 name: str,
                 sampling_period: str,
                 instance: Optional[str] = None):
        """
        :param str name: The performance counter name.
        :param str sampling_period: How often the counter is sampled
               (format not shown in this file — presumably an ISO 8601 duration; confirm against the service API).
        :param str instance: Optional counter instance to sample; the key is only set when provided.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "sampling_period", sampling_period)
        if instance is not None:
            pulumi.set(__self__, "instance", instance)
    @property
    @pulumi.getter
    def name(self) -> str:
        """The performance counter name."""
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="samplingPeriod")
    def sampling_period(self) -> str:
        """How often the counter is sampled (exact format not verifiable from this file)."""
        return pulumi.get(self, "sampling_period")
    @property
    @pulumi.getter
    def instance(self) -> Optional[str]:
        """Optional counter instance to sample."""
        return pulumi.get(self, "instance")
    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute, falling back to the input.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PrivateEndpointConnectionResponse(dict):
    """
    A private endpoint connection.
    """
    def __init__(__self__, *,
                 id: str,
                 name: str,
                 provisioning_state: str,
                 type: str,
                 private_endpoint: Optional['outputs.PrivateEndpointPropertyResponse'] = None,
                 private_link_service_connection_state: Optional['outputs.PrivateLinkServiceConnectionStatePropertyResponse'] = None):
        """
        :param str id: Azure resource Id
        :param str name: Azure resource name
        :param str provisioning_state: State of the private endpoint connection.
        :param str type: Azure resource type
        :param 'PrivateEndpointPropertyResponseArgs' private_endpoint: Private endpoint that this connection belongs to.
        :param 'PrivateLinkServiceConnectionStatePropertyResponseArgs' private_link_service_connection_state: Connection state of the private endpoint connection.
        """
        # Required fields are assigned unconditionally.
        for key, value in (("id", id),
                           ("name", name),
                           ("provisioning_state", provisioning_state),
                           ("type", type)):
            pulumi.set(__self__, key, value)
        # Optional fields are only stored when present.
        for key, value in (("private_endpoint", private_endpoint),
                           ("private_link_service_connection_state", private_link_service_connection_state)):
            if value is not None:
                pulumi.set(__self__, key, value)
    @property
    @pulumi.getter
    def id(self) -> str:
        """Azure resource Id."""
        return pulumi.get(self, "id")
    @property
    @pulumi.getter
    def name(self) -> str:
        """Azure resource name."""
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> str:
        """State of the private endpoint connection."""
        return pulumi.get(self, "provisioning_state")
    @property
    @pulumi.getter
    def type(self) -> str:
        """Azure resource type."""
        return pulumi.get(self, "type")
    @property
    @pulumi.getter(name="privateEndpoint")
    def private_endpoint(self) -> Optional['outputs.PrivateEndpointPropertyResponse']:
        """Private endpoint that this connection belongs to."""
        return pulumi.get(self, "private_endpoint")
    @property
    @pulumi.getter(name="privateLinkServiceConnectionState")
    def private_link_service_connection_state(self) -> Optional['outputs.PrivateLinkServiceConnectionStatePropertyResponse']:
        """Connection state of the private endpoint connection."""
        return pulumi.get(self, "private_link_service_connection_state")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PrivateEndpointPropertyResponse(dict):
    """
    The private endpoint a connection belongs to.
    """
    def __init__(__self__, *,
                 id: Optional[str] = None):
        """
        :param str id: Resource id of the private endpoint.
        """
        # Only store the key when a value was supplied.
        if id is not None:
            pulumi.set(__self__, "id", id)
    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """Resource id of the private endpoint."""
        return pulumi.get(self, "id")
    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute, falling back to the input.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class PrivateLinkScopedResourceResponse(dict):
    """
    A reference to a private link scope resource.
    """
    def __init__(__self__, *,
                 resource_id: Optional[str] = None,
                 scope_id: Optional[str] = None):
        """
        :param str resource_id: Full resource Id of the private link scope resource.
        :param str scope_id: Unique identifier of the private link scope.
        """
        # Optional fields: only stored when supplied.
        for key, value in (("resource_id", resource_id), ("scope_id", scope_id)):
            if value is not None:
                pulumi.set(__self__, key, value)
    @property
    @pulumi.getter(name="resourceId")
    def resource_id(self) -> Optional[str]:
        """Full resource Id of the private link scope resource."""
        return pulumi.get(self, "resource_id")
    @property
    @pulumi.getter(name="scopeId")
    def scope_id(self) -> Optional[str]:
        """Unique identifier of the private link scope."""
        return pulumi.get(self, "scope_id")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PrivateLinkServiceConnectionStatePropertyResponse(dict):
    """
    State of a private endpoint connection.
    """
    def __init__(__self__, *,
                 actions_required: str,
                 description: str,
                 status: str):
        """
        :param str actions_required: Actions required for the private link service connection.
        :param str description: Description of the private link service connection.
        :param str status: Status of the private link service connection.
        """
        # All three fields are required.
        for key, value in (("actions_required", actions_required),
                           ("description", description),
                           ("status", status)):
            pulumi.set(__self__, key, value)
    @property
    @pulumi.getter(name="actionsRequired")
    def actions_required(self) -> str:
        """Actions required for the private link service connection."""
        return pulumi.get(self, "actions_required")
    @property
    @pulumi.getter
    def description(self) -> str:
        """Description of the private link service connection."""
        return pulumi.get(self, "description")
    @property
    @pulumi.getter
    def status(self) -> str:
        """Status of the private link service connection."""
        return pulumi.get(self, "status")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RecurrenceResponse(dict):
    """
    The repeating times at which this profile begins. Not used when the
    FixedDate element is used instead.
    """
    def __init__(__self__, *,
                 frequency: str,
                 schedule: 'outputs.RecurrentScheduleResponse'):
        """
        :param str frequency: Recurrence frequency — how often the schedule profile
               takes effect. Must be Week: each week repeats the same set of profiles,
               so a daily schedule is expressed by listing every day of the week in
               **schedule**; the frequency then specifies weekly repetition.
        :param 'RecurrentScheduleResponseArgs' schedule: Scheduling constraints for when the profile begins.
        """
        pulumi.set(__self__, "frequency", frequency)
        pulumi.set(__self__, "schedule", schedule)
    @property
    @pulumi.getter
    def frequency(self) -> str:
        """Recurrence frequency; must be Week (each week repeats the same set of profiles)."""
        return pulumi.get(self, "frequency")
    @property
    @pulumi.getter
    def schedule(self) -> 'outputs.RecurrentScheduleResponse':
        """Scheduling constraints for when the profile begins."""
        return pulumi.get(self, "schedule")
    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute, falling back to the input.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class RecurrentScheduleResponse(dict):
    """
    The scheduling constraints for when the profile begins.
    """
    def __init__(__self__, *,
                 days: Sequence[str],
                 hours: Sequence[int],
                 minutes: Sequence[int],
                 time_zone: str):
        """
        :param Sequence[str] days: Days on which the profile takes effect (Sunday through Saturday).
        :param Sequence[int] hours: Hours at which the profile takes effect, 0-23 on the
               24-hour clock (AM/PM times are not supported).
        :param Sequence[int] minutes: Minutes at which the profile takes effect.
        :param str time_zone: Time zone for the profile's hours, e.g. 'Pacific Standard Time',
               'UTC', 'W. Europe Standard Time', 'Tokyo Standard Time', 'AUS Eastern Standard Time';
               see the Azure autoscale API documentation for the complete list of supported names.
        """
        # All fields are required; assign them in one pass.
        for field, value in (("days", days),
                             ("hours", hours),
                             ("minutes", minutes),
                             ("time_zone", time_zone)):
            pulumi.set(__self__, field, value)
    @property
    @pulumi.getter
    def days(self) -> Sequence[str]:
        """Days on which the profile takes effect (Sunday through Saturday)."""
        return pulumi.get(self, "days")
    @property
    @pulumi.getter
    def hours(self) -> Sequence[int]:
        """Hours (0-23) at which the profile takes effect; AM/PM times are not supported."""
        return pulumi.get(self, "hours")
    @property
    @pulumi.getter
    def minutes(self) -> Sequence[int]:
        """Minutes at which the profile takes effect."""
        return pulumi.get(self, "minutes")
    @property
    @pulumi.getter(name="timeZone")
    def time_zone(self) -> str:
        """Time zone for the profile's hours, e.g. 'Pacific Standard Time' or 'UTC' (see the Azure autoscale API docs for the full list)."""
        return pulumi.get(self, "time_zone")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RetentionPolicyResponse(dict):
    """
    The retention policy for the log.
    """
    def __init__(__self__, *,
                 days: int,
                 enabled: bool):
        """
        :param int days: Retention in days; 0 retains the events indefinitely.
        :param bool enabled: Whether the retention policy is enabled.
        """
        pulumi.set(__self__, "days", days)
        pulumi.set(__self__, "enabled", enabled)
    @property
    @pulumi.getter
    def days(self) -> int:
        """Retention in days; 0 retains the events indefinitely."""
        return pulumi.get(self, "days")
    @property
    @pulumi.getter
    def enabled(self) -> bool:
        """Whether the retention policy is enabled."""
        return pulumi.get(self, "enabled")
    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute, falling back to the input.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class RuleEmailActionResponse(dict):
    """
    The action that sends email when the rule condition is evaluated.
    The discriminator is always RuleEmailAction for this type.
    """
    def __init__(__self__, *,
                 odata_type: str,
                 custom_emails: Optional[Sequence[str]] = None,
                 send_to_service_owners: Optional[bool] = None):
        """
        :param str odata_type: Action type discriminator; one of RuleEmailAction or RuleWebhookAction.
               Expected value is 'Microsoft.Azure.Management.Insights.Models.RuleEmailAction'.
        :param Sequence[str] custom_emails: Custom administrator email addresses notified when the alert activates.
        :param bool send_to_service_owners: Whether the service administrators and co-administrators
               are notified when the alert activates.
        """
        # The discriminator is pinned for this type regardless of the value supplied.
        pulumi.set(__self__, "odata_type", 'Microsoft.Azure.Management.Insights.Models.RuleEmailAction')
        # Optional fields: only stored when supplied.
        for key, value in (("custom_emails", custom_emails),
                           ("send_to_service_owners", send_to_service_owners)):
            if value is not None:
                pulumi.set(__self__, key, value)
    @property
    @pulumi.getter(name="odataType")
    def odata_type(self) -> str:
        """Action type discriminator; always 'Microsoft.Azure.Management.Insights.Models.RuleEmailAction' here."""
        return pulumi.get(self, "odata_type")
    @property
    @pulumi.getter(name="customEmails")
    def custom_emails(self) -> Optional[Sequence[str]]:
        """Custom administrator email addresses notified when the alert activates."""
        return pulumi.get(self, "custom_emails")
    @property
    @pulumi.getter(name="sendToServiceOwners")
    def send_to_service_owners(self) -> Optional[bool]:
        """Whether the service administrators and co-administrators are notified on activation."""
        return pulumi.get(self, "send_to_service_owners")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RuleManagementEventClaimsDataSourceResponse(dict):
    """
    Claims for a rule management event data source.
    """
    def __init__(__self__, *,
                 email_address: Optional[str] = None):
        """
        :param str email_address: The email address.
        """
        # Only store the key when a value was supplied.
        if email_address is not None:
            pulumi.set(__self__, "email_address", email_address)
    @property
    @pulumi.getter(name="emailAddress")
    def email_address(self) -> Optional[str]:
        """The email address."""
        return pulumi.get(self, "email_address")
    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute, falling back to the input.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class RuleManagementEventDataSourceResponse(dict):
    """
    A rule management event data source. The discriminator field is always
    RuleManagementEventDataSource for this type.
    """
    def __init__(__self__, *,
                 odata_type: str,
                 claims: Optional['outputs.RuleManagementEventClaimsDataSourceResponse'] = None,
                 event_name: Optional[str] = None,
                 event_source: Optional[str] = None,
                 legacy_resource_id: Optional[str] = None,
                 level: Optional[str] = None,
                 metric_namespace: Optional[str] = None,
                 operation_name: Optional[str] = None,
                 resource_group_name: Optional[str] = None,
                 resource_location: Optional[str] = None,
                 resource_provider_name: Optional[str] = None,
                 resource_uri: Optional[str] = None,
                 status: Optional[str] = None,
                 sub_status: Optional[str] = None):
        """
        :param str odata_type: Data source type discriminator; one of RuleMetricDataSource
               or RuleManagementEventDataSource. Expected value is
               'Microsoft.Azure.Management.Insights.Models.RuleManagementEventDataSource'.
        :param 'RuleManagementEventClaimsDataSourceResponseArgs' claims: The claims.
        :param str event_name: The event name.
        :param str event_source: The event source.
        :param str legacy_resource_id: Legacy resource identifier of the monitored resource.
               **NOTE**: cannot be updated for an existing rule.
        :param str level: The level.
        :param str metric_namespace: Namespace of the metric.
        :param str operation_name: Operation name to check for; any operation matches when omitted.
        :param str resource_group_name: The resource group name.
        :param str resource_location: Location of the resource.
        :param str resource_provider_name: The resource provider name.
        :param str resource_uri: Resource identifier of the monitored resource.
               **NOTE**: cannot be updated for an existing rule.
        :param str status: Operation status to check for; any status matches when omitted.
        :param str sub_status: The substatus.
        """
        # The discriminator is pinned for this type regardless of the value supplied.
        pulumi.set(__self__, "odata_type", 'Microsoft.Azure.Management.Insights.Models.RuleManagementEventDataSource')
        # Every other field is optional and only stored when supplied.
        for key, value in (("claims", claims),
                           ("event_name", event_name),
                           ("event_source", event_source),
                           ("legacy_resource_id", legacy_resource_id),
                           ("level", level),
                           ("metric_namespace", metric_namespace),
                           ("operation_name", operation_name),
                           ("resource_group_name", resource_group_name),
                           ("resource_location", resource_location),
                           ("resource_provider_name", resource_provider_name),
                           ("resource_uri", resource_uri),
                           ("status", status),
                           ("sub_status", sub_status)):
            if value is not None:
                pulumi.set(__self__, key, value)
    @property
    @pulumi.getter(name="odataType")
    def odata_type(self) -> str:
        """Data source type discriminator; always 'Microsoft.Azure.Management.Insights.Models.RuleManagementEventDataSource' here."""
        return pulumi.get(self, "odata_type")
    @property
    @pulumi.getter
    def claims(self) -> Optional['outputs.RuleManagementEventClaimsDataSourceResponse']:
        """The claims."""
        return pulumi.get(self, "claims")
    @property
    @pulumi.getter(name="eventName")
    def event_name(self) -> Optional[str]:
        """The event name."""
        return pulumi.get(self, "event_name")
    @property
    @pulumi.getter(name="eventSource")
    def event_source(self) -> Optional[str]:
        """The event source."""
        return pulumi.get(self, "event_source")
    @property
    @pulumi.getter(name="legacyResourceId")
    def legacy_resource_id(self) -> Optional[str]:
        """Legacy resource identifier of the monitored resource. **NOTE**: cannot be updated for an existing rule."""
        return pulumi.get(self, "legacy_resource_id")
    @property
    @pulumi.getter
    def level(self) -> Optional[str]:
        """The level."""
        return pulumi.get(self, "level")
    @property
    @pulumi.getter(name="metricNamespace")
    def metric_namespace(self) -> Optional[str]:
        """Namespace of the metric."""
        return pulumi.get(self, "metric_namespace")
    @property
    @pulumi.getter(name="operationName")
    def operation_name(self) -> Optional[str]:
        """Operation name to check for; any operation matches when omitted."""
        return pulumi.get(self, "operation_name")
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> Optional[str]:
        """The resource group name."""
        return pulumi.get(self, "resource_group_name")
    @property
    @pulumi.getter(name="resourceLocation")
    def resource_location(self) -> Optional[str]:
        """Location of the resource."""
        return pulumi.get(self, "resource_location")
    @property
    @pulumi.getter(name="resourceProviderName")
    def resource_provider_name(self) -> Optional[str]:
        """The resource provider name."""
        return pulumi.get(self, "resource_provider_name")
    @property
    @pulumi.getter(name="resourceUri")
    def resource_uri(self) -> Optional[str]:
        """Resource identifier of the monitored resource. **NOTE**: cannot be updated for an existing rule."""
        return pulumi.get(self, "resource_uri")
    @property
    @pulumi.getter
    def status(self) -> Optional[str]:
        """Operation status to check for; any status matches when omitted."""
        return pulumi.get(self, "status")
    @property
    @pulumi.getter(name="subStatus")
    def sub_status(self) -> Optional[str]:
        """The substatus."""
        return pulumi.get(self, "sub_status")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RuleMetricDataSourceResponse(dict):
    """
    A rule metric data source. The discriminator value is always
    RuleMetricDataSource for this type.
    """
    def __init__(__self__, *,
                 odata_type: str,
                 legacy_resource_id: Optional[str] = None,
                 metric_name: Optional[str] = None,
                 metric_namespace: Optional[str] = None,
                 resource_location: Optional[str] = None,
                 resource_uri: Optional[str] = None):
        """
        :param str odata_type: Data source type discriminator; one of RuleMetricDataSource
               or RuleManagementEventDataSource. Expected value is
               'Microsoft.Azure.Management.Insights.Models.RuleMetricDataSource'.
        :param str legacy_resource_id: Legacy resource identifier of the monitored resource.
               **NOTE**: cannot be updated for an existing rule.
        :param str metric_name: Name of the metric that the rule monitors.
        :param str metric_namespace: Namespace of the metric.
        :param str resource_location: Location of the resource.
        :param str resource_uri: Resource identifier of the monitored resource.
               **NOTE**: cannot be updated for an existing rule.
        """
        # The discriminator is pinned for this type regardless of the value supplied.
        pulumi.set(__self__, "odata_type", 'Microsoft.Azure.Management.Insights.Models.RuleMetricDataSource')
        # Every other field is optional and only stored when supplied.
        for key, value in (("legacy_resource_id", legacy_resource_id),
                           ("metric_name", metric_name),
                           ("metric_namespace", metric_namespace),
                           ("resource_location", resource_location),
                           ("resource_uri", resource_uri)):
            if value is not None:
                pulumi.set(__self__, key, value)
    @property
    @pulumi.getter(name="odataType")
    def odata_type(self) -> str:
        """Data source type discriminator; always 'Microsoft.Azure.Management.Insights.Models.RuleMetricDataSource' here."""
        return pulumi.get(self, "odata_type")
    @property
    @pulumi.getter(name="legacyResourceId")
    def legacy_resource_id(self) -> Optional[str]:
        """Legacy resource identifier of the monitored resource. **NOTE**: cannot be updated for an existing rule."""
        return pulumi.get(self, "legacy_resource_id")
    @property
    @pulumi.getter(name="metricName")
    def metric_name(self) -> Optional[str]:
        """Name of the metric that the rule monitors."""
        return pulumi.get(self, "metric_name")
    @property
    @pulumi.getter(name="metricNamespace")
    def metric_namespace(self) -> Optional[str]:
        """Namespace of the metric."""
        return pulumi.get(self, "metric_namespace")
    @property
    @pulumi.getter(name="resourceLocation")
    def resource_location(self) -> Optional[str]:
        """Location of the resource."""
        return pulumi.get(self, "resource_location")
    @property
    @pulumi.getter(name="resourceUri")
    def resource_uri(self) -> Optional[str]:
        """Resource identifier of the monitored resource. **NOTE**: cannot be updated for an existing rule."""
        return pulumi.get(self, "resource_uri")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RuleWebhookActionResponse(dict):
    """
    An action that posts to a service when the rule condition is evaluated.
    The discriminator is always RuleWebhookAction for this type.
    """

    def __init__(__self__, *,
                 odata_type: str,
                 properties: Optional[Mapping[str, str]] = None,
                 service_uri: Optional[str] = None):
        """
        An action that posts to a service when the rule condition is evaluated.
        The discriminator is always RuleWebhookAction for this type.

        :param str odata_type: the action discriminator (RuleEmailAction or RuleWebhookAction).
               Expected value is 'Microsoft.Azure.Management.Insights.Models.RuleWebhookAction'.
        :param Mapping[str, str] properties: custom properties appended to the webhook payload
               when the post operation runs.
        :param str service_uri: the service uri to Post the notification to when the alert
               activates or resolves.
        """
        # The discriminator is pinned to this type's value regardless of the argument.
        pulumi.set(__self__, "odata_type", 'Microsoft.Azure.Management.Insights.Models.RuleWebhookAction')
        for field, value in (("properties", properties), ("service_uri", service_uri)):
            if value is not None:
                pulumi.set(__self__, field, value)

    @property
    @pulumi.getter(name="odataType")
    def odata_type(self) -> str:
        """
        the action discriminator (RuleEmailAction or RuleWebhookAction).
        Expected value is 'Microsoft.Azure.Management.Insights.Models.RuleWebhookAction'.
        """
        return pulumi.get(self, "odata_type")

    @property
    @pulumi.getter
    def properties(self) -> Optional[Mapping[str, str]]:
        """
        custom properties appended to the webhook payload when the post operation runs.
        """
        return pulumi.get(self, "properties")

    @property
    @pulumi.getter(name="serviceUri")
    def service_uri(self) -> Optional[str]:
        """
        the service uri to Post the notification to when the alert activates or resolves.
        """
        return pulumi.get(self, "service_uri")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake_cased = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_cased or prop
@pulumi.output_type
class ScaleActionResponse(dict):
    """
    The parameters for the scaling action.
    """
    def __init__(__self__, *,
                 cooldown: str,
                 direction: str,
                 type: str,
                 value: Optional[str] = None):
        """
        The parameters for the scaling action.

        :param str cooldown: the amount of time to wait since the last scaling action before this action occurs. It must be between 1 week and 1 minute in ISO 8601 format.
        :param str direction: the scale direction. Whether the scaling action increases or decreases the number of instances.
        :param str type: the type of action that should occur when the scale rule fires.
        :param str value: the number of instances that are involved in the scaling action. This value must be 1 or greater. The default value is 1.
        """
        pulumi.set(__self__, "cooldown", cooldown)
        pulumi.set(__self__, "direction", direction)
        pulumi.set(__self__, "type", type)
        # "value" is always stored: the original code defaulted None to '1' and
        # then re-checked for None, which was always true after the default —
        # fold the default in and set unconditionally.
        pulumi.set(__self__, "value", '1' if value is None else value)
    @property
    @pulumi.getter
    def cooldown(self) -> str:
        """
        the amount of time to wait since the last scaling action before this action occurs. It must be between 1 week and 1 minute in ISO 8601 format.
        """
        return pulumi.get(self, "cooldown")
    @property
    @pulumi.getter
    def direction(self) -> str:
        """
        the scale direction. Whether the scaling action increases or decreases the number of instances.
        """
        return pulumi.get(self, "direction")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        the type of action that should occur when the scale rule fires.
        """
        return pulumi.get(self, "type")
    @property
    @pulumi.getter
    def value(self) -> Optional[str]:
        """
        the number of instances that are involved in the scaling action. This value must be 1 or greater. The default value is 1.
        """
        return pulumi.get(self, "value")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ScaleCapacityResponse(dict):
    """
    The number of instances that can be used during this profile.
    """

    def __init__(__self__, *,
                 default: str,
                 maximum: str,
                 minimum: str):
        """
        The number of instances that can be used during this profile.

        :param str default: instance count applied when metrics are unavailable for
               evaluation; only used if the current instance count is lower than it.
        :param str maximum: maximum instance count for the resource; the effective
               maximum is limited by the cores available in the subscription.
        :param str minimum: minimum instance count for the resource.
        """
        for field, value in (("default", default),
                             ("maximum", maximum),
                             ("minimum", minimum)):
            pulumi.set(__self__, field, value)

    @property
    @pulumi.getter
    def default(self) -> str:
        """
        instance count applied when metrics are unavailable for evaluation; only used
        if the current instance count is lower than it.
        """
        return pulumi.get(self, "default")

    @property
    @pulumi.getter
    def maximum(self) -> str:
        """
        maximum instance count for the resource; the effective maximum is limited by
        the cores available in the subscription.
        """
        return pulumi.get(self, "maximum")

    @property
    @pulumi.getter
    def minimum(self) -> str:
        """
        minimum instance count for the resource.
        """
        return pulumi.get(self, "minimum")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake_cased = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_cased or prop
@pulumi.output_type
class ScaleRuleMetricDimensionResponse(dict):
    """
    Specifies an auto scale rule metric dimension.
    """

    def __init__(__self__, *,
                 dimension_name: str,
                 operator: str,
                 values: Sequence[str]):
        """
        Specifies an auto scale rule metric dimension.

        :param str dimension_name: name of the dimension.
        :param str operator: the dimension operator; only 'Equals' (matches any of the
               values) and 'NotEquals' (matches none of the values) are supported.
        :param Sequence[str] values: dimension values, e.g. ["App1","App2"].
        """
        for field, value in (("dimension_name", dimension_name),
                             ("operator", operator),
                             ("values", values)):
            pulumi.set(__self__, field, value)

    @property
    @pulumi.getter(name="dimensionName")
    def dimension_name(self) -> str:
        """
        name of the dimension.
        """
        return pulumi.get(self, "dimension_name")

    @property
    @pulumi.getter
    def operator(self) -> str:
        """
        the dimension operator; only 'Equals' (matches any of the values) and
        'NotEquals' (matches none of the values) are supported.
        """
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        dimension values, e.g. ["App1","App2"].
        """
        return pulumi.get(self, "values")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake_cased = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_cased or prop
@pulumi.output_type
class ScaleRuleResponse(dict):
    """
    A rule providing the trigger and parameters for a scaling action.
    """

    def __init__(__self__, *,
                 metric_trigger: 'outputs.MetricTriggerResponse',
                 scale_action: 'outputs.ScaleActionResponse'):
        """
        A rule providing the trigger and parameters for a scaling action.

        :param 'MetricTriggerResponseArgs' metric_trigger: the trigger that results in
               a scaling action.
        :param 'ScaleActionResponseArgs' scale_action: the parameters for the scaling
               action.
        """
        pulumi.set(__self__, "metric_trigger", metric_trigger)
        pulumi.set(__self__, "scale_action", scale_action)

    @property
    @pulumi.getter(name="metricTrigger")
    def metric_trigger(self) -> 'outputs.MetricTriggerResponse':
        """
        the trigger that results in a scaling action.
        """
        return pulumi.get(self, "metric_trigger")

    @property
    @pulumi.getter(name="scaleAction")
    def scale_action(self) -> 'outputs.ScaleActionResponse':
        """
        the parameters for the scaling action.
        """
        return pulumi.get(self, "scale_action")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake_cased = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_cased or prop
@pulumi.output_type
class ScheduleResponse(dict):
    """
    Defines how often to run the search and the time interval to query.
    """

    def __init__(__self__, *,
                 frequency_in_minutes: int,
                 time_window_in_minutes: int):
        """
        Defines how often to run the search and the time interval to query.

        :param int frequency_in_minutes: how often (in minutes) the rule condition is
               evaluated.
        :param int time_window_in_minutes: time window of data fetched for the query;
               should be >= frequencyInMinutes.
        """
        pulumi.set(__self__, "frequency_in_minutes", frequency_in_minutes)
        pulumi.set(__self__, "time_window_in_minutes", time_window_in_minutes)

    @property
    @pulumi.getter(name="frequencyInMinutes")
    def frequency_in_minutes(self) -> int:
        """
        how often (in minutes) the rule condition is evaluated.
        """
        return pulumi.get(self, "frequency_in_minutes")

    @property
    @pulumi.getter(name="timeWindowInMinutes")
    def time_window_in_minutes(self) -> int:
        """
        time window of data fetched for the query; should be >= frequencyInMinutes.
        """
        return pulumi.get(self, "time_window_in_minutes")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake_cased = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_cased or prop
@pulumi.output_type
class SinkConfigurationResponse(dict):
    """
    A sink configuration, identified only by its kind.
    """

    def __init__(__self__, *,
                 kind: str):
        """
        :param str kind: the kind of the sink configuration.
        """
        pulumi.set(__self__, "kind", kind)

    @property
    @pulumi.getter
    def kind(self) -> str:
        """
        the kind of the sink configuration.
        """
        return pulumi.get(self, "kind")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake_cased = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_cased or prop
@pulumi.output_type
class SmsReceiverResponse(dict):
    """
    An SMS receiver.
    """

    def __init__(__self__, *,
                 country_code: str,
                 name: str,
                 phone_number: str,
                 status: str):
        """
        An SMS receiver.

        :param str country_code: country code of the SMS receiver.
        :param str name: name of the SMS receiver; must be unique across all receivers
               within an action group.
        :param str phone_number: phone number of the SMS receiver.
        :param str status: status of the receiver.
        """
        for field, value in (("country_code", country_code),
                             ("name", name),
                             ("phone_number", phone_number),
                             ("status", status)):
            pulumi.set(__self__, field, value)

    @property
    @pulumi.getter(name="countryCode")
    def country_code(self) -> str:
        """
        country code of the SMS receiver.
        """
        return pulumi.get(self, "country_code")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        name of the SMS receiver; must be unique across all receivers within an action group.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="phoneNumber")
    def phone_number(self) -> str:
        """
        phone number of the SMS receiver.
        """
        return pulumi.get(self, "phone_number")

    @property
    @pulumi.getter
    def status(self) -> str:
        """
        status of the receiver.
        """
        return pulumi.get(self, "status")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake_cased = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_cased or prop
@pulumi.output_type
class SourceResponse(dict):
    """
    Specifies the log search query.
    """

    def __init__(__self__, *,
                 data_source_id: str,
                 authorized_resources: Optional[Sequence[str]] = None,
                 query: Optional[str] = None,
                 query_type: Optional[str] = None):
        """
        Specifies the log search query.

        :param str data_source_id: resource uri over which the log search query runs.
        :param Sequence[str] authorized_resources: resources referred to in the query.
        :param str query: the log search query; required for action type AlertingAction.
        :param str query_type: set value to 'ResultCount'.
        """
        pulumi.set(__self__, "data_source_id", data_source_id)
        optional_fields = {
            "authorized_resources": authorized_resources,
            "query": query,
            "query_type": query_type,
        }
        for field, value in optional_fields.items():
            if value is not None:
                pulumi.set(__self__, field, value)

    @property
    @pulumi.getter(name="dataSourceId")
    def data_source_id(self) -> str:
        """
        resource uri over which the log search query runs.
        """
        return pulumi.get(self, "data_source_id")

    @property
    @pulumi.getter(name="authorizedResources")
    def authorized_resources(self) -> Optional[Sequence[str]]:
        """
        resources referred to in the query.
        """
        return pulumi.get(self, "authorized_resources")

    @property
    @pulumi.getter
    def query(self) -> Optional[str]:
        """
        the log search query; required for action type AlertingAction.
        """
        return pulumi.get(self, "query")

    @property
    @pulumi.getter(name="queryType")
    def query_type(self) -> Optional[str]:
        """
        set value to 'ResultCount'.
        """
        return pulumi.get(self, "query_type")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake_cased = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_cased or prop
@pulumi.output_type
class SubscriptionLogSettingsResponse(dict):
    """
    Part of a Subscription diagnostic setting; holds the settings for one log.
    """

    def __init__(__self__, *,
                 enabled: bool,
                 category: Optional[str] = None):
        """
        Part of a Subscription diagnostic setting; holds the settings for one log.

        :param bool enabled: whether this log is enabled.
        :param str category: name of the Subscription Diagnostic Log category for the
               resource type this setting is applied to.
        """
        pulumi.set(__self__, "enabled", enabled)
        if category is not None:
            pulumi.set(__self__, "category", category)

    @property
    @pulumi.getter
    def enabled(self) -> bool:
        """
        whether this log is enabled.
        """
        return pulumi.get(self, "enabled")

    @property
    @pulumi.getter
    def category(self) -> Optional[str]:
        """
        name of the Subscription Diagnostic Log category for the resource type this
        setting is applied to.
        """
        return pulumi.get(self, "category")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake_cased = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_cased or prop
@pulumi.output_type
class SyslogDataSourceResponse(dict):
    """
    Definition of which syslog data is collected and how.
    Only collected from Linux machines.
    """

    def __init__(__self__, *,
                 facility_names: Sequence[str],
                 name: str,
                 streams: Sequence[str],
                 log_levels: Optional[Sequence[str]] = None):
        """
        Definition of which syslog data is collected and how.
        Only collected from Linux machines.

        :param Sequence[str] facility_names: list of facility names.
        :param str name: friendly name for the data source; should be unique across all
               data sources (regardless of type) within the data collection rule.
        :param Sequence[str] streams: streams this data source is sent to; a stream
               indicates the schema used for the data and usually the Log Analytics
               table it lands in.
        :param Sequence[str] log_levels: log levels to collect.
        """
        for field, value in (("facility_names", facility_names),
                             ("name", name),
                             ("streams", streams)):
            pulumi.set(__self__, field, value)
        if log_levels is not None:
            pulumi.set(__self__, "log_levels", log_levels)

    @property
    @pulumi.getter(name="facilityNames")
    def facility_names(self) -> Sequence[str]:
        """
        list of facility names.
        """
        return pulumi.get(self, "facility_names")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        friendly name for the data source; should be unique across all data sources
        (regardless of type) within the data collection rule.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def streams(self) -> Sequence[str]:
        """
        streams this data source is sent to; a stream indicates the schema used for the
        data and usually the Log Analytics table it lands in.
        """
        return pulumi.get(self, "streams")

    @property
    @pulumi.getter(name="logLevels")
    def log_levels(self) -> Optional[Sequence[str]]:
        """
        log levels to collect.
        """
        return pulumi.get(self, "log_levels")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake_cased = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_cased or prop
@pulumi.output_type
class ThresholdRuleConditionResponse(dict):
    """
    A rule condition based on a metric crossing a threshold.
    """

    def __init__(__self__, *,
                 odata_type: str,
                 operator: str,
                 threshold: float,
                 data_source: Optional[Any] = None,
                 time_aggregation: Optional[str] = None,
                 window_size: Optional[str] = None):
        """
        A rule condition based on a metric crossing a threshold.

        :param str odata_type: the condition discriminator; one of
               ManagementEventRuleCondition (occurrences of management events),
               LocationThresholdRuleCondition (web test failure count), or
               ThresholdRuleCondition (metric threshold). Expected value is
               'Microsoft.Azure.Management.Insights.Models.ThresholdRuleCondition'.
        :param str operator: operator used to compare the data to the threshold.
        :param float threshold: threshold value that activates the alert.
        :param Union['RuleManagementEventDataSourceResponseArgs', 'RuleMetricDataSourceResponseArgs'] data_source: the resource the rule collects data from; for this
               type it will always be a RuleMetricDataSource.
        :param str time_aggregation: time aggregation operator — how collected data is
               combined over time. Defaults to the metric's PrimaryAggregationType.
        :param str window_size: period (ISO 8601 duration) used to monitor alert
               activity against the threshold; if given, must be between 5 minutes
               and 1 day.
        """
        # The discriminator is pinned to this type's value regardless of the argument.
        pulumi.set(__self__, "odata_type", 'Microsoft.Azure.Management.Insights.Models.ThresholdRuleCondition')
        pulumi.set(__self__, "operator", operator)
        pulumi.set(__self__, "threshold", threshold)
        optional_fields = {
            "data_source": data_source,
            "time_aggregation": time_aggregation,
            "window_size": window_size,
        }
        for field, value in optional_fields.items():
            if value is not None:
                pulumi.set(__self__, field, value)

    @property
    @pulumi.getter(name="odataType")
    def odata_type(self) -> str:
        """
        the condition discriminator; one of ManagementEventRuleCondition (occurrences of
        management events), LocationThresholdRuleCondition (web test failure count), or
        ThresholdRuleCondition (metric threshold).
        Expected value is 'Microsoft.Azure.Management.Insights.Models.ThresholdRuleCondition'.
        """
        return pulumi.get(self, "odata_type")

    @property
    @pulumi.getter
    def operator(self) -> str:
        """
        operator used to compare the data to the threshold.
        """
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter
    def threshold(self) -> float:
        """
        threshold value that activates the alert.
        """
        return pulumi.get(self, "threshold")

    @property
    @pulumi.getter(name="dataSource")
    def data_source(self) -> Optional[Any]:
        """
        the resource the rule collects data from; for this type it will always be a
        RuleMetricDataSource.
        """
        return pulumi.get(self, "data_source")

    @property
    @pulumi.getter(name="timeAggregation")
    def time_aggregation(self) -> Optional[str]:
        """
        time aggregation operator — how collected data is combined over time.
        Defaults to the metric's PrimaryAggregationType.
        """
        return pulumi.get(self, "time_aggregation")

    @property
    @pulumi.getter(name="windowSize")
    def window_size(self) -> Optional[str]:
        """
        period (ISO 8601 duration) used to monitor alert activity against the
        threshold; if given, must be between 5 minutes and 1 day.
        """
        return pulumi.get(self, "window_size")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake_cased = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_cased or prop
@pulumi.output_type
class TimeWindowResponse(dict):
    """
    A specific date-time window for the profile.
    """

    def __init__(__self__, *,
                 end: str,
                 start: str,
                 time_zone: Optional[str] = None):
        """
        A specific date-time window for the profile.

        :param str end: end time for the profile, in ISO 8601 format.
        :param str start: start time for the profile, in ISO 8601 format.
        :param str time_zone: timezone of the start and end times, given as a Windows
               time zone display name — e.g. 'Pacific Standard Time', 'UTC',
               'W. Europe Standard Time', 'India Standard Time', 'Tokyo Standard Time',
               'New Zealand Standard Time' (see the Azure autoscale documentation for
               the full list of accepted names).
        """
        pulumi.set(__self__, "end", end)
        pulumi.set(__self__, "start", start)
        if time_zone is not None:
            pulumi.set(__self__, "time_zone", time_zone)

    @property
    @pulumi.getter
    def end(self) -> str:
        """
        end time for the profile, in ISO 8601 format.
        """
        return pulumi.get(self, "end")

    @property
    @pulumi.getter
    def start(self) -> str:
        """
        start time for the profile, in ISO 8601 format.
        """
        return pulumi.get(self, "start")

    @property
    @pulumi.getter(name="timeZone")
    def time_zone(self) -> Optional[str]:
        """
        timezone of the start and end times, given as a Windows time zone display name —
        e.g. 'Pacific Standard Time', 'UTC', 'W. Europe Standard Time',
        'India Standard Time', 'Tokyo Standard Time', 'New Zealand Standard Time'
        (see the Azure autoscale documentation for the full list of accepted names).
        """
        return pulumi.get(self, "time_zone")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake_cased = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_cased or prop
@pulumi.output_type
class TriggerConditionResponse(dict):
    """
    The condition that results in the Log Search rule firing.
    """

    def __init__(__self__, *,
                 threshold: float,
                 threshold_operator: str,
                 metric_trigger: Optional['outputs.LogMetricTriggerResponse'] = None):
        """
        The condition that results in the Log Search rule firing.

        :param float threshold: result or count threshold the rule triggers on.
        :param str threshold_operator: evaluation operation for the rule —
               'GreaterThan' or 'LessThan.
        :param 'LogMetricTriggerResponseArgs' metric_trigger: trigger condition for a
               metric query rule.
        """
        pulumi.set(__self__, "threshold", threshold)
        pulumi.set(__self__, "threshold_operator", threshold_operator)
        if metric_trigger is not None:
            pulumi.set(__self__, "metric_trigger", metric_trigger)

    @property
    @pulumi.getter
    def threshold(self) -> float:
        """
        result or count threshold the rule triggers on.
        """
        return pulumi.get(self, "threshold")

    @property
    @pulumi.getter(name="thresholdOperator")
    def threshold_operator(self) -> str:
        """
        evaluation operation for the rule — 'GreaterThan' or 'LessThan.
        """
        return pulumi.get(self, "threshold_operator")

    @property
    @pulumi.getter(name="metricTrigger")
    def metric_trigger(self) -> Optional['outputs.LogMetricTriggerResponse']:
        """
        trigger condition for a metric query rule.
        """
        return pulumi.get(self, "metric_trigger")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake_cased = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_cased or prop
@pulumi.output_type
class UserAssignedIdentitiesResponse(dict):
    """
    Customer Managed Identity.
    """

    def __init__(__self__, *,
                 principal_id: str,
                 tenant_id: str):
        """
        Customer Managed Identity.

        :param str principal_id: principal ID of the resource identity.
        :param str tenant_id: tenant ID of the resource.
        """
        pulumi.set(__self__, "principal_id", principal_id)
        pulumi.set(__self__, "tenant_id", tenant_id)

    @property
    @pulumi.getter(name="principalId")
    def principal_id(self) -> str:
        """
        principal ID of the resource identity.
        """
        return pulumi.get(self, "principal_id")

    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> str:
        """
        tenant ID of the resource.
        """
        return pulumi.get(self, "tenant_id")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake_cased = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_cased or prop
@pulumi.output_type
class VoiceReceiverResponse(dict):
    """
    A voice receiver.
    """

    def __init__(__self__, *,
                 country_code: str,
                 name: str,
                 phone_number: str):
        """
        A voice receiver.

        :param str country_code: country code of the voice receiver.
        :param str name: name of the voice receiver; must be unique across all
               receivers within an action group.
        :param str phone_number: phone number of the voice receiver.
        """
        for field, value in (("country_code", country_code),
                             ("name", name),
                             ("phone_number", phone_number)):
            pulumi.set(__self__, field, value)

    @property
    @pulumi.getter(name="countryCode")
    def country_code(self) -> str:
        """
        country code of the voice receiver.
        """
        return pulumi.get(self, "country_code")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        name of the voice receiver; must be unique across all receivers within an action group.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="phoneNumber")
    def phone_number(self) -> str:
        """
        phone number of the voice receiver.
        """
        return pulumi.get(self, "phone_number")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake_cased = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_cased or prop
@pulumi.output_type
class WebTestGeolocationResponse(dict):
    """
    Geo-physical location to run a web test from. One or more locations must be
    specified for the test to run.
    """

    def __init__(__self__, *,
                 location: Optional[str] = None):
        """
        Geo-physical location to run a web test from. One or more locations must be
        specified for the test to run.

        :param str location: location ID for the webtest to run from.
        """
        if location is not None:
            pulumi.set(__self__, "location", location)

    @property
    @pulumi.getter
    def location(self) -> Optional[str]:
        """
        location ID for the webtest to run from.
        """
        return pulumi.get(self, "location")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake_cased = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_cased or prop
@pulumi.output_type
class WebTestPropertiesResponseConfiguration(dict):
    """
    An XML configuration specification for a WebTest.
    """

    def __init__(__self__, *,
                 web_test: Optional[str] = None):
        """
        An XML configuration specification for a WebTest.

        :param str web_test: the XML specification of a WebTest to run against an
               application.
        """
        if web_test is not None:
            pulumi.set(__self__, "web_test", web_test)

    @property
    @pulumi.getter(name="webTest")
    def web_test(self) -> Optional[str]:
        """
        the XML specification of a WebTest to run against an application.
        """
        return pulumi.get(self, "web_test")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake_cased = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_cased or prop
@pulumi.output_type
class WebhookNotificationResponse(dict):
    """
    Webhook notification of an autoscale event.
    """
    def __init__(__self__, *,
                 properties: Optional[Mapping[str, str]] = None,
                 service_uri: Optional[str] = None):
        """
        Webhook notification of an autoscale event.
        :param Mapping[str, str] properties: a property bag of settings. This value can be empty.
        :param str service_uri: the service address to receive the notification.
        """
        # Both fields are optional; store only those explicitly supplied.
        if properties is not None:
            pulumi.set(__self__, "properties", properties)
        if service_uri is not None:
            pulumi.set(__self__, "service_uri", service_uri)

    @property
    @pulumi.getter
    def properties(self) -> Optional[Mapping[str, str]]:
        """
        a property bag of settings. This value can be empty.
        """
        return pulumi.get(self, "properties")

    @property
    @pulumi.getter(name="serviceUri")
    def service_uri(self) -> Optional[str]:
        """
        the service address to receive the notification.
        """
        return pulumi.get(self, "service_uri")

    def _translate_property(self, prop):
        # Map a camelCase wire name back to its snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class WebhookReceiverResponse(dict):
    """
    A webhook receiver.
    """
    def __init__(__self__, *,
                 name: str,
                 service_uri: str,
                 use_common_alert_schema: bool,
                 identifier_uri: Optional[str] = None,
                 object_id: Optional[str] = None,
                 tenant_id: Optional[str] = None,
                 use_aad_auth: Optional[bool] = None):
        """
        A webhook receiver.
        :param str name: The name of the webhook receiver. Names must be unique across all receivers within an action group.
        :param str service_uri: The URI where webhooks should be sent.
        :param bool use_common_alert_schema: Indicates whether to use common alert schema.
        :param str identifier_uri: Indicates the identifier uri for aad auth.
        :param str object_id: Indicates the webhook app object Id for aad auth.
        :param str tenant_id: Indicates the tenant id for aad auth.
        :param bool use_aad_auth: Indicates whether or not use AAD authentication. Defaults to False when omitted.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "service_uri", service_uri)
        pulumi.set(__self__, "use_common_alert_schema", use_common_alert_schema)
        # The remaining AAD-related fields are stored only when supplied.
        if identifier_uri is not None:
            pulumi.set(__self__, "identifier_uri", identifier_uri)
        if object_id is not None:
            pulumi.set(__self__, "object_id", object_id)
        if tenant_id is not None:
            pulumi.set(__self__, "tenant_id", tenant_id)
        # use_aad_auth defaults to False, so (unlike the other optional fields)
        # it is always stored.  The original code re-checked `is not None`
        # after the defaulting step, which could never be false; that dead
        # branch is removed here without any behavior change.
        if use_aad_auth is None:
            use_aad_auth = False
        pulumi.set(__self__, "use_aad_auth", use_aad_auth)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the webhook receiver. Names must be unique across all receivers within an action group.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="serviceUri")
    def service_uri(self) -> str:
        """
        The URI where webhooks should be sent.
        """
        return pulumi.get(self, "service_uri")

    @property
    @pulumi.getter(name="useCommonAlertSchema")
    def use_common_alert_schema(self) -> bool:
        """
        Indicates whether to use common alert schema.
        """
        return pulumi.get(self, "use_common_alert_schema")

    @property
    @pulumi.getter(name="identifierUri")
    def identifier_uri(self) -> Optional[str]:
        """
        Indicates the identifier uri for aad auth.
        """
        return pulumi.get(self, "identifier_uri")

    @property
    @pulumi.getter(name="objectId")
    def object_id(self) -> Optional[str]:
        """
        Indicates the webhook app object Id for aad auth.
        """
        return pulumi.get(self, "object_id")

    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> Optional[str]:
        """
        Indicates the tenant id for aad auth.
        """
        return pulumi.get(self, "tenant_id")

    @property
    @pulumi.getter(name="useAadAuth")
    def use_aad_auth(self) -> Optional[bool]:
        """
        Indicates whether or not use AAD authentication.
        """
        return pulumi.get(self, "use_aad_auth")

    def _translate_property(self, prop):
        # Map a camelCase wire name back to its snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class WebtestLocationAvailabilityCriteriaResponse(dict):
    """
    Specifies the metric alert rule criteria for a web test resource.
    """
    def __init__(__self__, *,
                 component_id: str,
                 failed_location_count: float,
                 odata_type: str,
                 web_test_id: str):
        """
        Specifies the metric alert rule criteria for a web test resource.
        :param str component_id: The Application Insights resource Id.
        :param float failed_location_count: The number of failed locations.
        :param str odata_type: specifies the type of the alert criteria.
               Expected value is 'Microsoft.Azure.Monitor.WebtestLocationAvailabilityCriteria'.
        :param str web_test_id: The Application Insights web test Id.
        """
        pulumi.set(__self__, "component_id", component_id)
        pulumi.set(__self__, "failed_location_count", failed_location_count)
        # The discriminator is forced to the fixed value below; the
        # odata_type argument is accepted but its value is not used.
        pulumi.set(__self__, "odata_type", 'Microsoft.Azure.Monitor.WebtestLocationAvailabilityCriteria')
        pulumi.set(__self__, "web_test_id", web_test_id)

    @property
    @pulumi.getter(name="componentId")
    def component_id(self) -> str:
        """
        The Application Insights resource Id.
        """
        return pulumi.get(self, "component_id")

    @property
    @pulumi.getter(name="failedLocationCount")
    def failed_location_count(self) -> float:
        """
        The number of failed locations.
        """
        return pulumi.get(self, "failed_location_count")

    @property
    @pulumi.getter(name="odataType")
    def odata_type(self) -> str:
        """
        specifies the type of the alert criteria.
        Expected value is 'Microsoft.Azure.Monitor.WebtestLocationAvailabilityCriteria'.
        """
        return pulumi.get(self, "odata_type")

    @property
    @pulumi.getter(name="webTestId")
    def web_test_id(self) -> str:
        """
        The Application Insights web test Id.
        """
        return pulumi.get(self, "web_test_id")

    def _translate_property(self, prop):
        # Map a camelCase wire name back to its snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class WindowsEventLogDataSourceResponse(dict):
    """
    Definition of which Windows Event Log events will be collected and how they will be collected.
    Only collected from Windows machines.
    """
    def __init__(__self__, *,
                 name: str,
                 scheduled_transfer_period: str,
                 streams: Sequence[str],
                 x_path_queries: Sequence[str]):
        """
        Definition of which Windows Event Log events will be collected and how they will be collected.
        Only collected from Windows machines.
        :param str name: A friendly name for the data source.
               This name should be unique across all data sources (regardless of type) within the data collection rule.
        :param str scheduled_transfer_period: The interval between data uploads (scheduled transfers), rounded up to the nearest minute.
        :param Sequence[str] streams: List of streams that this data source will be sent to.
               A stream indicates what schema will be used for this data and usually what table in Log Analytics the data will be sent to.
        :param Sequence[str] x_path_queries: A list of Windows Event Log queries in XPATH format.
        """
        # All four fields are required; store them unconditionally.
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "scheduled_transfer_period", scheduled_transfer_period)
        pulumi.set(__self__, "streams", streams)
        pulumi.set(__self__, "x_path_queries", x_path_queries)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        A friendly name for the data source.
        This name should be unique across all data sources (regardless of type) within the data collection rule.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="scheduledTransferPeriod")
    def scheduled_transfer_period(self) -> str:
        """
        The interval between data uploads (scheduled transfers), rounded up to the nearest minute.
        """
        return pulumi.get(self, "scheduled_transfer_period")

    @property
    @pulumi.getter
    def streams(self) -> Sequence[str]:
        """
        List of streams that this data source will be sent to.
        A stream indicates what schema will be used for this data and usually what table in Log Analytics the data will be sent to.
        """
        return pulumi.get(self, "streams")

    @property
    @pulumi.getter(name="xPathQueries")
    def x_path_queries(self) -> Sequence[str]:
        """
        A list of Windows Event Log queries in XPATH format.
        """
        return pulumi.get(self, "x_path_queries")

    def _translate_property(self, prop):
        # Map a camelCase wire name back to its snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class WorkbookTemplateGalleryResponse(dict):
    """
    Gallery information for a workbook template.
    """
    def __init__(__self__, *,
                 category: Optional[str] = None,
                 name: Optional[str] = None,
                 order: Optional[int] = None,
                 resource_type: Optional[str] = None,
                 type: Optional[str] = None):
        """
        Gallery information for a workbook template.
        :param str category: Category for the gallery.
        :param str name: Name of the workbook template in the gallery.
        :param int order: Order of the template within the gallery.
        :param str resource_type: Azure resource type supported by the gallery.
        :param str type: Type of workbook supported by the workbook template.
        """
        # Every field is optional; store only those explicitly supplied.
        if category is not None:
            pulumi.set(__self__, "category", category)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if order is not None:
            pulumi.set(__self__, "order", order)
        if resource_type is not None:
            pulumi.set(__self__, "resource_type", resource_type)
        if type is not None:
            pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter
    def category(self) -> Optional[str]:
        """
        Category for the gallery.
        """
        return pulumi.get(self, "category")

    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        """
        Name of the workbook template in the gallery.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def order(self) -> Optional[int]:
        """
        Order of the template within the gallery.
        """
        return pulumi.get(self, "order")

    @property
    @pulumi.getter(name="resourceType")
    def resource_type(self) -> Optional[str]:
        """
        Azure resource type supported by the gallery.
        """
        return pulumi.get(self, "resource_type")

    @property
    @pulumi.getter
    def type(self) -> Optional[str]:
        """
        Type of workbook supported by the workbook template.
        """
        return pulumi.get(self, "type")

    def _translate_property(self, prop):
        # Map a camelCase wire name back to its snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class WorkbookTemplateLocalizedGalleryResponse(dict):
    """
    Localized template data and gallery information.
    """
    def __init__(__self__, *,
                 galleries: Optional[Sequence['outputs.WorkbookTemplateGalleryResponse']] = None,
                 template_data: Optional[Any] = None):
        """
        Localized template data and gallery information.
        :param Sequence['WorkbookTemplateGalleryResponseArgs'] galleries: Workbook galleries supported by the template.
        :param Any template_data: Valid JSON object containing workbook template payload.
        """
        # Both fields are optional; store only those explicitly supplied.
        if galleries is not None:
            pulumi.set(__self__, "galleries", galleries)
        if template_data is not None:
            pulumi.set(__self__, "template_data", template_data)

    @property
    @pulumi.getter
    def galleries(self) -> Optional[Sequence['outputs.WorkbookTemplateGalleryResponse']]:
        """
        Workbook galleries supported by the template.
        """
        return pulumi.get(self, "galleries")

    @property
    @pulumi.getter(name="templateData")
    def template_data(self) -> Optional[Any]:
        """
        Valid JSON object containing workbook template payload.
        """
        return pulumi.get(self, "template_data")

    def _translate_property(self, prop):
        # Map a camelCase wire name back to its snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
| 41.145043
| 2,729
| 0.664308
| 22,096
| 190,913
| 5.551503
| 0.045438
| 0.039131
| 0.031052
| 0.045383
| 0.788328
| 0.749075
| 0.706145
| 0.668294
| 0.634854
| 0.607079
| 0
| 0.001145
| 0.249449
| 190,913
| 4,639
| 2,730
| 41.153912
| 0.85491
| 0.39418
| 0
| 0.612957
| 1
| 0.000831
| 0.150061
| 0.077237
| 0
| 0
| 0
| 0
| 0
| 1
| 0.192276
| false
| 0
| 0.002907
| 0.040698
| 0.387458
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7155888b1746770c0daa2511a11495b6accff481
| 222
|
py
|
Python
|
myscrumy/idowuscrumy/admin.py
|
Manuelkila/myscrumy
|
e60eee4c5d20e484c6a3a53fc352f3bd28f0a33a
|
[
"Apache-2.0"
] | null | null | null |
myscrumy/idowuscrumy/admin.py
|
Manuelkila/myscrumy
|
e60eee4c5d20e484c6a3a53fc352f3bd28f0a33a
|
[
"Apache-2.0"
] | null | null | null |
myscrumy/idowuscrumy/admin.py
|
Manuelkila/myscrumy
|
e60eee4c5d20e484c6a3a53fc352f3bd28f0a33a
|
[
"Apache-2.0"
] | null | null | null |
from django.contrib import admin
# Register your models here.
from .models import GoalStatus, ScrumyGoals, ScrumyHistory
# Expose the scrumy models through the Django admin site, in the same
# order the original individual register() calls used.
for scrumy_model in (GoalStatus, ScrumyGoals, ScrumyHistory):
    admin.site.register(scrumy_model)
| 27.75
| 58
| 0.833333
| 27
| 222
| 6.851852
| 0.481481
| 0.145946
| 0.275676
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085586
| 222
| 8
| 59
| 27.75
| 0.91133
| 0.117117
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
7175a7d0a428595998f9a1c3008cbfb69ae961c5
| 219
|
py
|
Python
|
pipenv/patched/notpip/_vendor/rich/diagnose.py
|
sthagen/pipenv
|
0924f75fd1004c848ea67d4272315eda4210b352
|
[
"MIT"
] | 23
|
2017-01-20T01:18:31.000Z
|
2017-01-20T17:25:11.000Z
|
pipenv/patched/notpip/_vendor/rich/diagnose.py
|
sthagen/pipenv
|
0924f75fd1004c848ea67d4272315eda4210b352
|
[
"MIT"
] | 1
|
2017-01-20T05:13:58.000Z
|
2017-01-20T05:13:58.000Z
|
pipenv/patched/notpip/_vendor/rich/diagnose.py
|
sthagen/pipenv
|
0924f75fd1004c848ea67d4272315eda4210b352
|
[
"MIT"
] | null | null | null |
if __name__ == "__main__":  # pragma: no cover
    # Manual diagnostic entry point: build a Console and dump its state
    # to the terminal via rich's own inspect() helper.
    from pipenv.patched.notpip._vendor.rich.console import Console
    from pipenv.patched.notpip._vendor.rich import inspect
    console = Console()
    inspect(console)
| 31.285714
| 66
| 0.730594
| 27
| 219
| 5.555556
| 0.555556
| 0.133333
| 0.226667
| 0.306667
| 0.44
| 0.44
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173516
| 219
| 6
| 67
| 36.5
| 0.828729
| 0.073059
| 0
| 0
| 0
| 0
| 0.039801
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
71ba3405e11fa0dc7ee4a4d6cea40f9d0890eaa8
| 57
|
py
|
Python
|
pylib/__init__.py
|
xenoner1506/toolkit
|
89d6bc01af718efa7c65aeaba28f902b84d6c2c1
|
[
"MIT"
] | null | null | null |
pylib/__init__.py
|
xenoner1506/toolkit
|
89d6bc01af718efa7c65aeaba28f902b84d6c2c1
|
[
"MIT"
] | null | null | null |
pylib/__init__.py
|
xenoner1506/toolkit
|
89d6bc01af718efa7c65aeaba28f902b84d6c2c1
|
[
"MIT"
] | null | null | null |
import os
import DATA
import parDict
import plot_lines
| 8.142857
| 17
| 0.824561
| 9
| 57
| 5.111111
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175439
| 57
| 6
| 18
| 9.5
| 0.978723
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e0832a19962fec4897c3d0d2c9cf4080d5b7a1c8
| 142
|
py
|
Python
|
abc/abc012/abc012c.py
|
c-yan/atcoder
|
940e49d576e6a2d734288fadaf368e486480a948
|
[
"MIT"
] | 1
|
2019-08-21T00:49:34.000Z
|
2019-08-21T00:49:34.000Z
|
abc/abc012/abc012c.py
|
c-yan/atcoder
|
940e49d576e6a2d734288fadaf368e486480a948
|
[
"MIT"
] | null | null | null |
abc/abc012/abc012c.py
|
c-yan/atcoder
|
940e49d576e6a2d734288fadaf368e486480a948
|
[
"MIT"
] | null | null | null |
# Read N from stdin and print every single-digit pair (i, j) whose
# product equals 2025 - N, in row-major order of the times table.
target = 2025 - int(input())
for left in range(1, 10):
    for right in range(1, 10):
        if left * right == target:
            print('%d x %d' % (left, right))
| 17.75
| 37
| 0.408451
| 28
| 142
| 2.071429
| 0.571429
| 0.241379
| 0.275862
| 0.344828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114943
| 0.387324
| 142
| 7
| 38
| 20.285714
| 0.551724
| 0
| 0
| 0
| 0
| 0
| 0.049296
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e0958337f02b26f49f5fc99d347d82d5d96671ff
| 3,596
|
py
|
Python
|
Real.py
|
PolynomialTime/ASONAM2019
|
2cf285461fe17d946702e52b9debf882d46205af
|
[
"MIT"
] | null | null | null |
Real.py
|
PolynomialTime/ASONAM2019
|
2cf285461fe17d946702e52b9debf882d46205af
|
[
"MIT"
] | null | null | null |
Real.py
|
PolynomialTime/ASONAM2019
|
2cf285461fe17d946702e52b9debf882d46205af
|
[
"MIT"
] | null | null | null |
# encoding: utf-8
import networkx as nx
# import matplotlib.pyplot as plt
import random
import os
import csv
import Greedy
import Replacement
class Real:
    """Experiment driver over the CollegeMsg real-world temporal graph."""

    def College(self):
        # Load the CollegeMsg edge list as a directed graph.
        file_name = "CollegeMsg.txt"
        G = nx.read_edgelist(file_name, create_using=nx.DiGraph())  # directed graphs
        # print G.nodes
        return G

    def output_3(self, heu):
        # Sweep d from 0..49 with fixed rho1=1, rho2=2; average |D1| over
        # repeated runs of Greedy / ReplacementA / ReplacementB and write
        # one CSV per algorithm.
        datas_greedy = [['Real1']]
        datas_R1 = [['Real1']]
        datas_R2 = [['Real1']]
        # 'Max':
        for d in range(0, 50):
            avg_greedy = 0.0
            avg_R1 = 0.0
            avg_R2 = 0.0
            for i in range(0, 5):
                G = self.College()
                g = Greedy.Greedy(G, d, 1, 2, heu)
                g.run()
                avg_greedy += len(g.D1)
                r1 = Replacement.ReplacementA(G, d, 1, 2, heu)
                r1.run()
                avg_R1 += len(r1.D1)
                r2 = Replacement.ReplacementB(G, d, 1, 2, heu)
                r2.run()
                avg_R2 += len(r2.D1)
                print(i)
            # NOTE(review): the inner loop runs 5 iterations (range(0, 5))
            # but the totals are divided by 10 — looks like a stale divisor
            # from an earlier 10-iteration version; confirm intended average.
            avg_greedy = avg_greedy / 10
            avg_R1 = avg_R1 / 10
            avg_R2 = avg_R2 / 10
            datas_greedy.append(avg_greedy)
            datas_R1.append(avg_R1)
            datas_R2.append(avg_R2)
        with open('greedy_' + heu + '.csv', 'w', newline='') as f:
            writer = csv.writer(f)
            for row in datas_greedy:
                writer.writerow(row)
        with open('r1_' + heu + '.csv', 'w', newline='') as f:
            writer = csv.writer(f)
            for row in datas_R1:
                writer.writerow(row)
        with open('r2_' + heu + '.csv', 'w', newline='') as f:
            writer = csv.writer(f)
            for row in datas_R2:
                writer.writerow(row)

    def output_4(self, heu):  # fix d
        # Fix d=10 and sweep rho1, rho2 over 1..10 each; same averaging
        # and CSV output scheme as output_3, with a '2_' filename prefix.
        datas_greedy = [['Real1']]
        datas_R1 = [['Real1']]
        datas_R2 = [['Real1']]
        d = 10
        for rho1 in range (1,11):
            for rho2 in range(1, 11):
                avg_greedy = 0.0
                avg_R1 = 0.0
                avg_R2 = 0.0
                for i in range(0, 5):
                    G = self.College()
                    g = Greedy.Greedy(G, d, rho1, rho2, heu)
                    g.run()
                    avg_greedy += len(g.D1)
                    r1 = Replacement.ReplacementA(G, d, rho1, rho2, heu)
                    r1.run()
                    avg_R1 += len(r1.D1)
                    r2 = Replacement.ReplacementB(G, d, rho1, rho2, heu)
                    r2.run()
                    avg_R2 += len(r2.D1)
                    print(i)
                # NOTE(review): same 5-iterations / divide-by-10 mismatch
                # as in output_3 — confirm intended averaging.
                avg_greedy = avg_greedy / 10
                avg_R1 = avg_R1 / 10
                avg_R2 = avg_R2 / 10
                datas_greedy.append(avg_greedy)
                datas_R1.append(avg_R1)
                datas_R2.append(avg_R2)
        with open('2_greedy_' + heu + '.csv', 'w', newline='') as f:
            writer = csv.writer(f)
            for row in datas_greedy:
                writer.writerow(row)
        with open('2_r1_' + heu + '.csv', 'w', newline='') as f:
            writer = csv.writer(f)
            for row in datas_R1:
                writer.writerow(row)
        with open('2_r2_' + heu + '.csv', 'w', newline='') as f:
            writer = csv.writer(f)
            for row in datas_R2:
                writer.writerow(row)
# Run each heuristic ('Max', 'Btw', 'Min') through both experiment sweeps.
gen = Real()
gen.output_3('Max')
gen.output_3('Btw')
gen.output_3('Min')
gen.output_4('Max')
gen.output_4('Btw')
gen.output_4('Min')
| 28.539683
| 85
| 0.457453
| 450
| 3,596
| 3.502222
| 0.171111
| 0.057107
| 0.02665
| 0.053299
| 0.738579
| 0.719543
| 0.718274
| 0.718274
| 0.718274
| 0.667513
| 0
| 0.060125
| 0.421858
| 3,596
| 126
| 86
| 28.539683
| 0.697932
| 0.025028
| 0
| 0.625
| 0
| 0
| 0.035429
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03125
| false
| 0
| 0.0625
| 0
| 0.114583
| 0.020833
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e0b346e40e93189fbb4c8325499c95cc233f1dbb
| 54
|
py
|
Python
|
dbsync/exceptions.py
|
jborland0/dbsync
|
15ed45ac938f5c43f20136731048476c5ad0cb31
|
[
"MIT"
] | null | null | null |
dbsync/exceptions.py
|
jborland0/dbsync
|
15ed45ac938f5c43f20136731048476c5ad0cb31
|
[
"MIT"
] | null | null | null |
dbsync/exceptions.py
|
jborland0/dbsync
|
15ed45ac938f5c43f20136731048476c5ad0cb31
|
[
"MIT"
] | null | null | null |
class DatabaseStructureException(Exception):
    """Raised when the database structure does not match what dbsync expects."""
| 18
| 44
| 0.814815
| 4
| 54
| 11
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12963
| 54
| 2
| 45
| 27
| 0.93617
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
e0bab7fcb0d1e2a428c8d12c680b3e8c2a3af520
| 2,279
|
py
|
Python
|
python_modules/dagster/dagster_tests/cli_tests/snapshots/snap_test_config_scaffolder.py
|
bitdotioinc/dagster
|
4fe395a37b206b1a48b956fa5dd72bf698104cca
|
[
"Apache-2.0"
] | 2
|
2021-06-21T17:50:26.000Z
|
2021-06-21T19:14:23.000Z
|
python_modules/dagster/dagster_tests/cli_tests/snapshots/snap_test_config_scaffolder.py
|
bitdotioinc/dagster
|
4fe395a37b206b1a48b956fa5dd72bf698104cca
|
[
"Apache-2.0"
] | 7
|
2022-03-16T06:55:04.000Z
|
2022-03-18T07:03:25.000Z
|
python_modules/dagster/dagster_tests/cli_tests/snapshots/snap_test_config_scaffolder.py
|
bitdotioinc/dagster
|
4fe395a37b206b1a48b956fa5dd72bf698104cca
|
[
"Apache-2.0"
] | 1
|
2021-08-18T17:21:57.000Z
|
2021-08-18T17:21:57.000Z
|
# -*- coding: utf-8 -*-
# snapshottest: v1 - https://goo.gl/zC4yUc
from __future__ import unicode_literals
from snapshottest import Snapshot
snapshots = Snapshot()
# Expected scaffolded run-config shapes, keyed by snapshottest test name.
# These are auto-captured snapshots; regenerate via snapshottest rather
# than editing the literals by hand.
snapshots['test_basic_solids_config 1'] = {
    'execution': {
        'in_process': {
            'config': {'marker_to_close': '', 'retries': {'disabled': {}, 'enabled': {}}}
        },
        'multiprocess': {
            'config': {'max_concurrent': 0, 'retries': {'disabled': {}, 'enabled': {}}}
        },
    },
    'loggers': {'console': {'config': {'log_level': '', 'name': ''}}},
    'resources': {},
    'solids': {'required_field_solid': {'config': {'required_int': 0}}},
    'storage': {'filesystem': {'config': {'base_dir': ''}}, 'in_memory': {}},
    'intermediate_storage': {'filesystem': {'config': {'base_dir': ''}}, 'in_memory': {}},
}

# Mode-one resource config only (snapshot 1), then the full scaffold (snapshot 2).
snapshots['test_two_modes 1'] = {'resources': {'value': {'config': {'mode_one_field': ''}}}}

snapshots['test_two_modes 2'] = {
    'execution': {
        'in_process': {
            'config': {'marker_to_close': '', 'retries': {'disabled': {}, 'enabled': {}}}
        },
        'multiprocess': {
            'config': {'max_concurrent': 0, 'retries': {'disabled': {}, 'enabled': {}}}
        },
    },
    'loggers': {'console': {'config': {'log_level': '', 'name': ''}}},
    'resources': {'value': {'config': {'mode_one_field': ''}}},
    'solids': {},
    'storage': {'filesystem': {'config': {'base_dir': ''}}, 'in_memory': {}},
    'intermediate_storage': {'filesystem': {'config': {'base_dir': ''}}, 'in_memory': {}},
}

# Mode-two resource config only (snapshot 3), then the full scaffold (snapshot 4).
snapshots['test_two_modes 3'] = {'resources': {'value': {'config': {'mode_two_field': 0}}}}

snapshots['test_two_modes 4'] = {
    'execution': {
        'in_process': {
            'config': {'marker_to_close': '', 'retries': {'disabled': {}, 'enabled': {}}}
        },
        'multiprocess': {
            'config': {'max_concurrent': 0, 'retries': {'disabled': {}, 'enabled': {}}}
        },
    },
    'loggers': {'console': {'config': {'log_level': '', 'name': ''}}},
    'resources': {'value': {'config': {'mode_two_field': 0}}},
    'solids': {},
    'storage': {'filesystem': {'config': {'base_dir': ''}}, 'in_memory': {}},
    'intermediate_storage': {'filesystem': {'config': {'base_dir': ''}}, 'in_memory': {}},
}
| 37.983333
| 92
| 0.520404
| 200
| 2,279
| 5.645
| 0.29
| 0.079717
| 0.116918
| 0.14349
| 0.79008
| 0.79008
| 0.75465
| 0.717449
| 0.717449
| 0.717449
| 0
| 0.007739
| 0.206231
| 2,279
| 59
| 93
| 38.627119
| 0.616363
| 0.027205
| 0
| 0.52
| 0
| 0
| 0.45935
| 0.01084
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.04
| 0
| 0.04
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e0be421ad1585f37ea4a1706b749313b803f10c3
| 180
|
py
|
Python
|
package_parser/package_parser/utils/__init__.py
|
Prajakta46/Lab-Semantic-Data-Web-Technologies-WS21-22
|
c7439149bb6658b2824001d156677a45b05ec6cd
|
[
"MIT"
] | null | null | null |
package_parser/package_parser/utils/__init__.py
|
Prajakta46/Lab-Semantic-Data-Web-Technologies-WS21-22
|
c7439149bb6658b2824001d156677a45b05ec6cd
|
[
"MIT"
] | 31
|
2022-02-10T11:01:51.000Z
|
2022-02-18T12:37:15.000Z
|
package_parser/package_parser/utils/__init__.py
|
Prajakta46/Lab-Semantic-Data-Web-Technologies-WS21-22
|
c7439149bb6658b2824001d156677a45b05ec6cd
|
[
"MIT"
] | null | null | null |
from ._ASTWalker import ASTWalker
from ._files import ensure_file_exists, initialize_and_read_exclude_file, list_files
from ._qnames import declaration_qname_to_name, parent_qname
| 45
| 84
| 0.883333
| 26
| 180
| 5.576923
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 180
| 3
| 85
| 60
| 0.878788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e0f53c511ab2b62e1d9b668ab8337f8c43fe412b
| 7,266
|
py
|
Python
|
test/test_models.py
|
yyaodong/kerasMore
|
79775599fa2ea434c3e6208b76ef0f8f0dbec1b6
|
[
"MIT"
] | 1
|
2016-01-24T03:16:55.000Z
|
2016-01-24T03:16:55.000Z
|
test/test_models.py
|
nyan-girl/keras
|
3d0704cfe508438d0fcb6a0a00279b6420a711dd
|
[
"MIT"
] | null | null | null |
test/test_models.py
|
nyan-girl/keras
|
3d0704cfe508438d0fcb6a0a00279b6420a711dd
|
[
"MIT"
] | 3
|
2018-01-26T14:23:48.000Z
|
2021-05-20T06:59:44.000Z
|
from __future__ import absolute_import
from __future__ import print_function
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers.core import Dense, Activation, Merge
from keras.utils import np_utils
import numpy as np
nb_classes = 10
batch_size = 128
nb_epoch = 1
max_train_samples = 5000
max_test_samples = 1000
np.random.seed(1337) # for reproducibility
# the data, shuffled and split between tran and test sets
(X_train, y_train), (X_test, y_test) = mnist.load_data()
X_train = X_train.reshape(60000,784)[:max_train_samples]
X_test = X_test.reshape(10000,784)[:max_test_samples]
X_train = X_train.astype("float32")
X_test = X_test.astype("float32")
X_train /= 255
X_test /= 255
# convert class vectors to binary class matrices
Y_train = np_utils.to_categorical(y_train, nb_classes)[:max_train_samples]
Y_test = np_utils.to_categorical(y_test, nb_classes)[:max_test_samples]
#########################
# sequential model test #
#########################
print('Test sequential')
model = Sequential()
model.add(Dense(784, 50))
model.add(Activation('relu'))
model.add(Dense(50, 10))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_data=(X_test, Y_test))
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_data=(X_test, Y_test))
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_split=0.1)
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_split=0.1)
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0)
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0, shuffle=False)
score = model.evaluate(X_train, Y_train, verbose=0)
print('score:', score)
if score < 0.25:
raise Exception('Score too low, learning issue.')
preds = model.predict(X_test, verbose=0)
classes = model.predict_classes(X_test, verbose=0)
model.get_config(verbose=1)
###################
# merge test: sum #
###################
print('Test merge: sum')
left = Sequential()
left.add(Dense(784, 50))
left.add(Activation('relu'))
right = Sequential()
right.add(Dense(784, 50))
right.add(Activation('relu'))
model = Sequential()
model.add(Merge([left, right], mode='sum'))
model.add(Dense(50, 10))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_data=([X_test, X_test], Y_test))
model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_data=([X_test, X_test], Y_test))
model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_split=0.1)
model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_split=0.1)
model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0)
model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0, shuffle=False)
score = model.evaluate([X_train, X_train], Y_train, verbose=0)
print('score:', score)
if score < 0.22:
raise Exception('Score too low, learning issue.')
preds = model.predict([X_test, X_test], verbose=0)
classes = model.predict_classes([X_test, X_test], verbose=0)
model.get_config(verbose=1)
###################
# merge test: concat #
###################
print('Test merge: concat')
left = Sequential()
left.add(Dense(784, 50))
left.add(Activation('relu'))
right = Sequential()
right.add(Dense(784, 50))
right.add(Activation('relu'))
model = Sequential()
model.add(Merge([left, right], mode='concat'))
model.add(Dense(50*2, 10))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_data=([X_test, X_test], Y_test))
model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_data=([X_test, X_test], Y_test))
model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_split=0.1)
model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_split=0.1)
model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0)
model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0, shuffle=False)
score = model.evaluate([X_train, X_train], Y_train, verbose=0)
print('score:', score)
if score < 0.22:
raise Exception('Score too low, learning issue.')
preds = model.predict([X_test, X_test], verbose=0)
classes = model.predict_classes([X_test, X_test], verbose=0)
model.get_config(verbose=1)
##########################
# test merge recursivity #
##########################
print('Test merge recursivity')
left = Sequential()
left.add(Dense(784, 50))
left.add(Activation('relu'))
right = Sequential()
right.add(Dense(784, 50))
right.add(Activation('relu'))
righter = Sequential()
righter.add(Dense(784, 50))
righter.add(Activation('relu'))
intermediate = Sequential()
intermediate.add(Merge([left, right], mode='sum'))
intermediate.add(Dense(50, 50))
intermediate.add(Activation('relu'))
model = Sequential()
model.add(Merge([intermediate, righter], mode='sum'))
model.add(Dense(50, 10))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
model.fit([X_train, X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_data=([X_test, X_test, X_test], Y_test))
model.fit([X_train, X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_data=([X_test, X_test, X_test], Y_test))
model.fit([X_train, X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_split=0.1)
model.fit([X_train, X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_split=0.1)
model.fit([X_train, X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0)
model.fit([X_train, X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0, shuffle=False)
score = model.evaluate([X_train, X_train, X_train], Y_train, verbose=0)
print('score:', score)
if score < 0.19:
raise Exception('Score too low, learning issue.')
preds = model.predict([X_test, X_test, X_test], verbose=0)
classes = model.predict_classes([X_test, X_test, X_test], verbose=0)
model.get_config(verbose=1)
model.save_weights('temp.h5')
model.load_weights('temp.h5')
score = model.evaluate([X_train, X_train, X_train], Y_train, verbose=0)
print('score:', score)
| 39.275676
| 173
| 0.74828
| 1,184
| 7,266
| 4.320946
| 0.094595
| 0.076231
| 0.043784
| 0.075059
| 0.795934
| 0.787725
| 0.783034
| 0.783034
| 0.774238
| 0.774238
| 0
| 0.026462
| 0.084641
| 7,266
| 184
| 174
| 39.48913
| 0.742745
| 0.028214
| 0
| 0.53125
| 0
| 0
| 0.065676
| 0.01398
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.054688
| 0
| 0.054688
| 0.078125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
1cd46b84cc27721eaa750585651745f9d787ab07
| 226
|
py
|
Python
|
gen3_etl/utils/id_vendor.py
|
ohsu-comp-bio/gen3-etl
|
9114f75cc8c8085111152ce0ef686a8a12f67f8e
|
[
"MIT"
] | 1
|
2020-01-22T17:05:58.000Z
|
2020-01-22T17:05:58.000Z
|
gen3_etl/utils/id_vendor.py
|
ohsu-comp-bio/gen3-etl
|
9114f75cc8c8085111152ce0ef686a8a12f67f8e
|
[
"MIT"
] | 2
|
2019-02-08T23:24:58.000Z
|
2021-05-13T22:42:28.000Z
|
gen3_etl/utils/id_vendor.py
|
ohsu-comp-bio/gen3_etl
|
9114f75cc8c8085111152ce0ef686a8a12f67f8e
|
[
"MIT"
] | null | null | null |
import uuid
"""
A singleton to hold the generated uuid for each submitter_id
"""
def get_id(submitter_id):
"""Calculates idempotent uuid based on submitter_id."""
return uuid.uuid3(uuid.NAMESPACE_DNS, submitter_id)
| 20.545455
| 60
| 0.747788
| 33
| 226
| 4.939394
| 0.666667
| 0.269939
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005263
| 0.159292
| 226
| 10
| 61
| 22.6
| 0.852632
| 0.216814
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1cdedf323ad6223ad6c30ab3a4ac1449e4a102a4
| 186
|
py
|
Python
|
example/exampleapp/admin.py
|
jerch/django-hidefield
|
dd3ce2acb6bacedd8936bba8ccdeb0642a562be8
|
[
"MIT"
] | 1
|
2020-11-24T22:11:58.000Z
|
2020-11-24T22:11:58.000Z
|
example/exampleapp/admin.py
|
jerch/django-hidefield
|
dd3ce2acb6bacedd8936bba8ccdeb0642a562be8
|
[
"MIT"
] | null | null | null |
example/exampleapp/admin.py
|
jerch/django-hidefield
|
dd3ce2acb6bacedd8936bba8ccdeb0642a562be8
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from exampleapp.models import MyModel
admin.site.register(MyModel, admin.ModelAdmin)
| 18.6
| 46
| 0.784946
| 24
| 186
| 5.875
| 0.708333
| 0.170213
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006135
| 0.123656
| 186
| 9
| 47
| 20.666667
| 0.858896
| 0.112903
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e8077a56267313a12893272828a9f5a253590b26
| 148
|
py
|
Python
|
src/baal/__init__.py
|
1715labs/baal
|
ee61f89c78353f7d50d4a3b3285ee52aab09dd12
|
[
"Apache-2.0"
] | null | null | null |
src/baal/__init__.py
|
1715labs/baal
|
ee61f89c78353f7d50d4a3b3285ee52aab09dd12
|
[
"Apache-2.0"
] | 1
|
2020-09-28T17:48:27.000Z
|
2020-09-28T17:48:27.000Z
|
src/baal/__init__.py
|
1715labs/baal
|
ee61f89c78353f7d50d4a3b3285ee52aab09dd12
|
[
"Apache-2.0"
] | null | null | null |
from .version import __version__
from .modelwrapper import ModelWrapper
from .utils.log_configuration import set_logger_config
set_logger_config()
| 24.666667
| 54
| 0.864865
| 19
| 148
| 6.263158
| 0.526316
| 0.151261
| 0.252101
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094595
| 148
| 5
| 55
| 29.6
| 0.88806
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
e81b26875c22192d91847ac7c73201b54c6dfedd
| 173
|
py
|
Python
|
src/z3c/traverser/stackinfo/__init__.py
|
zopefoundation/z3c.traverser
|
72b1fdd0864e25dc48e890a50eb8e223599d7b43
|
[
"ZPL-2.1"
] | null | null | null |
src/z3c/traverser/stackinfo/__init__.py
|
zopefoundation/z3c.traverser
|
72b1fdd0864e25dc48e890a50eb8e223599d7b43
|
[
"ZPL-2.1"
] | 1
|
2021-01-18T15:55:02.000Z
|
2021-01-18T15:55:02.000Z
|
src/z3c/traverser/stackinfo/__init__.py
|
zopefoundation/z3c.traverser
|
72b1fdd0864e25dc48e890a50eb8e223599d7b43
|
[
"ZPL-2.1"
] | 1
|
2020-07-03T10:58:29.000Z
|
2020-07-03T10:58:29.000Z
|
from __future__ import absolute_import
from .traversing import applyStackConsumers
def applyStackConsumersHandler(obj, event):
applyStackConsumers(obj, event.request)
| 24.714286
| 43
| 0.83815
| 17
| 173
| 8.235294
| 0.647059
| 0.114286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109827
| 173
| 6
| 44
| 28.833333
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
08fab97df465ca25d1a56afafabe60bf57e62991
| 75
|
py
|
Python
|
jacdac/sound_player/__init__.py
|
microsoft/jacdac-python
|
712ad5559e29065f5eccb5dbfe029c039132df5a
|
[
"MIT"
] | 1
|
2022-02-15T21:30:36.000Z
|
2022-02-15T21:30:36.000Z
|
jacdac/sound_player/__init__.py
|
microsoft/jacdac-python
|
712ad5559e29065f5eccb5dbfe029c039132df5a
|
[
"MIT"
] | null | null | null |
jacdac/sound_player/__init__.py
|
microsoft/jacdac-python
|
712ad5559e29065f5eccb5dbfe029c039132df5a
|
[
"MIT"
] | 1
|
2022-02-08T19:32:45.000Z
|
2022-02-08T19:32:45.000Z
|
# Autogenerated file.
from .client import SoundPlayerClient # type: ignore
| 25
| 52
| 0.8
| 8
| 75
| 7.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 75
| 2
| 53
| 37.5
| 0.923077
| 0.426667
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
1c0b49aa11d3b1aede94558c050276e9eb57a0f1
| 5,018
|
py
|
Python
|
tests/unit/utils/test_bundle_utils.py
|
jgweir/hammr
|
38adaab005e58294c1314b56957e4f040ad75749
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/utils/test_bundle_utils.py
|
jgweir/hammr
|
38adaab005e58294c1314b56957e4f040ad75749
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/utils/test_bundle_utils.py
|
jgweir/hammr
|
38adaab005e58294c1314b56957e4f040ad75749
|
[
"Apache-2.0"
] | null | null | null |
__author__ = 'UshareSoft'
import unittest
import json
import yaml
from mock import patch
from ussclicore.utils import generics_utils
from hammr.utils import bundle_utils
class TestFiles(unittest.TestCase):
@patch("ussclicore.utils.printer.out")
def test_check_bundle_should_failed_when_no_name(self, mock_method):
#Given
jsonPath = "tests/integration/data/bundle/bundleWithoutName.json"
#When
bundle = generics_utils.check_json_syntax(jsonPath)
bundle_utils.check_bundle(bundle)
#Then
mock_method.assert_called_with("There is no attribute [name] for a [bundle]", "ERROR")
@patch("ussclicore.utils.printer.out")
def test_check_bundle_should_failed_when_no_version(self, mock_method):
#Given
jsonPath = "tests/integration/data/bundle/bundleWithoutVersion.json"
#When
bundle = generics_utils.check_json_syntax(jsonPath)
bundle_utils.check_bundle(bundle)
#Then
mock_method.assert_called_with("no attribute [version] for [bundle]", "ERROR")
@patch("ussclicore.utils.printer.out")
def test_check_bundle_should_failed_when_no_files(self, mock_method):
#Given
jsonPath = "tests/integration/data/bundle/bundleWithoutFiles.json"
#When
bundle = generics_utils.check_json_syntax(jsonPath)
bundle_utils.check_bundle(bundle)
#Then
mock_method.assert_called_with("no attribute [files] for [bundle]", "ERROR")
@patch("ussclicore.utils.printer.out")
def test_check_bundle_should_failed_when_files_no_name(self, mock_method):
#Given
jsonPath = "tests/integration/data/bundle/bundleFilesWithoutName.json"
#When
bundle = generics_utils.check_json_syntax(jsonPath)
bundle_utils.check_bundle(bundle)
#Then
mock_method.assert_called_with("There is no attribute [name] for a [file]", "ERROR")
@patch("ussclicore.utils.printer.out")
def test_check_bundle_should_failed_when_files_no_source(self, mock_method):
#Given
jsonPath = "tests/integration/data/bundle/bundleFilesWithoutSource.json"
#When
bundle = generics_utils.check_json_syntax(jsonPath)
bundle_utils.check_bundle(bundle)
#Then
mock_method.assert_called_with("There is no attribute [source] for a [file]", "ERROR")
@patch("ussclicore.utils.printer.out")
def test_check_bundle_should_failed_when_tag_softwarefile_and_bootorder(self, mock_method):
#Given
jsonPath = "tests/integration/data/bundle/bundleFilesTagSoftwareFileKeyBootOrder.json"
#When
bundle = generics_utils.check_json_syntax(jsonPath)
bundle_utils.check_bundle(bundle)
#Then
mock_method.assert_called_with("There is the attribute [bootOrder] or [bootType] for file 'directoryTest' but is not tagged as 'bootscript'", "ERROR")
@patch("ussclicore.utils.printer.out")
def test_check_bundle_should_failed_when_tag_bootscript_and_rights(self, mock_method):
#Given
jsonPath = "tests/integration/data/bundle/bundleFilesTagBootScriptKeyRights.json"
#When
bundle = generics_utils.check_json_syntax(jsonPath)
bundle_utils.check_bundle(bundle)
#Then
mock_method.assert_called_with("There is the attribute [ownerGroup], [rights] or [symlink] for file 'cleanup_tmp.sh' but is not tagged as 'softwarefile'", "ERROR")
@patch("ussclicore.utils.printer.out")
def test_check_bundle_should_failed_when_tag_ospkg_in_directory(self, mock_method):
#Given
jsonPath = "tests/integration/data/bundle/bundleFilesTagOSPkgInDirectory.json"
#When
bundle = generics_utils.check_json_syntax(jsonPath)
bundle_utils.check_bundle(bundle)
#Then
mock_method.assert_called_with("The file 'iotop-0.6-2.el7.noarch.rpm, with tag 'ospkg' must be in the first level files section", "ERROR")
def test_check_bundle_should_succeed_when_no_restrictionRule(self):
# Given
jsonPath = "tests/integration/data/bundle/bundleWithoutRestrictionRule.json"
# When
bundle = generics_utils.check_json_syntax(jsonPath)
bundle_utils.check_bundle(bundle)
# Then
self.assertIsNotNone(bundle)
def test_check_bundle_should_succeed_when_empty_restrictionRule(self):
# Given
jsonPath = "tests/integration/data/bundle/bundleWithEmptyRestrictionRule.json"
# When
bundle = generics_utils.check_json_syntax(jsonPath)
bundle_utils.check_bundle(bundle)
# Then
self.assertIsNotNone(bundle)
def test_check_bundle_should_succeed(self):
#Given
jsonPath = "tests/integration/data/bundle/bundleFull.json"
#When
bundle = generics_utils.check_json_syntax(jsonPath)
bundle = bundle_utils.check_bundle(bundle)
#Then
self.assertIsNotNone(bundle)
if __name__ == '__main__':
unittest.main()
| 40.467742
| 171
| 0.714428
| 593
| 5,018
| 5.735245
| 0.168634
| 0.071156
| 0.038812
| 0.058218
| 0.781241
| 0.771832
| 0.771832
| 0.747721
| 0.713614
| 0.603352
| 0
| 0.000993
| 0.19709
| 5,018
| 123
| 172
| 40.796748
| 0.843137
| 0.029693
| 0
| 0.438356
| 0
| 0.041096
| 0.300786
| 0.187629
| 0
| 0
| 0
| 0
| 0.150685
| 1
| 0.150685
| false
| 0
| 0.082192
| 0
| 0.246575
| 0.109589
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
1c1180ed52982dedf5d6ad6101dbf30925196688
| 45
|
py
|
Python
|
gluoncv/auto/__init__.py
|
Kh4L/gluon-cv
|
849411ed56632cd854850b07142087d599f97dcb
|
[
"Apache-2.0"
] | 5,447
|
2018-04-25T18:02:51.000Z
|
2022-03-31T00:59:49.000Z
|
gluoncv/auto/__init__.py
|
Kh4L/gluon-cv
|
849411ed56632cd854850b07142087d599f97dcb
|
[
"Apache-2.0"
] | 1,566
|
2018-04-25T21:14:04.000Z
|
2022-03-31T06:42:42.000Z
|
gluoncv/auto/__init__.py
|
Kh4L/gluon-cv
|
849411ed56632cd854850b07142087d599f97dcb
|
[
"Apache-2.0"
] | 1,345
|
2018-04-25T18:44:13.000Z
|
2022-03-30T19:32:53.000Z
|
"""GluonCV auto"""
from .estimators import *
| 15
| 25
| 0.688889
| 5
| 45
| 6.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 45
| 2
| 26
| 22.5
| 0.794872
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
1c2d3c06541d8d9d49bb74af0946d1aa1c05a633
| 118
|
py
|
Python
|
jr_tools/exceptions.py
|
erickgnavar/jr_tools
|
46719b5708446ab19139fd5ac29d5bf51cf896b8
|
[
"MIT"
] | 1
|
2017-11-02T01:39:11.000Z
|
2017-11-02T01:39:11.000Z
|
jr_tools/exceptions.py
|
erickgnavar/jasper-reports-tools
|
46719b5708446ab19139fd5ac29d5bf51cf896b8
|
[
"MIT"
] | 1
|
2021-11-15T17:46:27.000Z
|
2021-11-15T17:46:27.000Z
|
jr_tools/exceptions.py
|
erickgnavar/jr_tools
|
46719b5708446ab19139fd5ac29d5bf51cf896b8
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
class ConnectionError(Exception):
pass
class InvalidOutputFormat(Exception):
pass
| 11.8
| 37
| 0.677966
| 11
| 118
| 7.272727
| 0.727273
| 0.325
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010526
| 0.194915
| 118
| 9
| 38
| 13.111111
| 0.831579
| 0.177966
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
1c3b9a147a8ab5ee9b8b9425ba2e1e83038eb05c
| 59
|
py
|
Python
|
detcon/__init__.py
|
isaaccorley/detcon-pytorch
|
a5e03faf0c27bdbe64b72625873c0b2d3a696f04
|
[
"MIT"
] | 5
|
2021-10-30T05:10:42.000Z
|
2022-03-26T08:44:17.000Z
|
detcon/__init__.py
|
isaaccorley/detcon-pytorch
|
a5e03faf0c27bdbe64b72625873c0b2d3a696f04
|
[
"MIT"
] | 2
|
2021-12-20T08:52:50.000Z
|
2021-12-28T15:14:48.000Z
|
detcon/__init__.py
|
isaaccorley/detcon-pytorch
|
a5e03faf0c27bdbe64b72625873c0b2d3a696f04
|
[
"MIT"
] | 1
|
2022-02-01T11:02:40.000Z
|
2022-02-01T11:02:40.000Z
|
from . import datasets, losses
from .models import DetConB
| 19.666667
| 30
| 0.79661
| 8
| 59
| 5.875
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152542
| 59
| 2
| 31
| 29.5
| 0.94
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
98d2b15c42043b09771296ff8f071db9e9efd6d6
| 378
|
py
|
Python
|
calculator.py
|
lewis1286/ci_calculator_app
|
84df381a89b22c97d0ded4c11dc1e482618dc0de
|
[
"BSD-2-Clause"
] | null | null | null |
calculator.py
|
lewis1286/ci_calculator_app
|
84df381a89b22c97d0ded4c11dc1e482618dc0de
|
[
"BSD-2-Clause"
] | null | null | null |
calculator.py
|
lewis1286/ci_calculator_app
|
84df381a89b22c97d0ded4c11dc1e482618dc0de
|
[
"BSD-2-Clause"
] | null | null | null |
"""
Calculator library with basic math opps
"""
def add(first, second):
"""TODO: Docstring for add.
:returns: TODO
"""
return first + second
def subtract(first, second):
"""TODO: Docstring for subtract.
:returns: TODO
"""
return first - second
def multiply(arg1, arg2):
"""TODO: Docstring for multiply.
"""
return arg1 * arg2
| 14.538462
| 39
| 0.60582
| 43
| 378
| 5.325581
| 0.44186
| 0.19214
| 0.209607
| 0.209607
| 0.50655
| 0.270742
| 0
| 0
| 0
| 0
| 0
| 0.014388
| 0.26455
| 378
| 25
| 40
| 15.12
| 0.809353
| 0.420635
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
98e3c2f6c39b72cb9f527ca6ff2ac3632d0b3633
| 224
|
py
|
Python
|
malcolm/modules/excalibur/parts/excaliburdriverpart.py
|
MattTaylorDLS/pymalcolm
|
995a8e4729bd745f8f617969111cc5a34ce1ac14
|
[
"Apache-2.0"
] | null | null | null |
malcolm/modules/excalibur/parts/excaliburdriverpart.py
|
MattTaylorDLS/pymalcolm
|
995a8e4729bd745f8f617969111cc5a34ce1ac14
|
[
"Apache-2.0"
] | null | null | null |
malcolm/modules/excalibur/parts/excaliburdriverpart.py
|
MattTaylorDLS/pymalcolm
|
995a8e4729bd745f8f617969111cc5a34ce1ac14
|
[
"Apache-2.0"
] | null | null | null |
from malcolm.modules.ADCore.parts import ExposureDetectorDriverPart
class ExcaliburDriverPart(ExposureDetectorDriverPart):
def is_hardware_triggered(self, child):
return child.triggerMode.value != "Internal"
| 24.888889
| 67
| 0.799107
| 21
| 224
| 8.428571
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129464
| 224
| 8
| 68
| 28
| 0.907692
| 0
| 0
| 0
| 0
| 0
| 0.036036
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
98f29b8144c328bbee7c0c651e6848b6ac948e0e
| 95
|
py
|
Python
|
functions/modules/import_function.py
|
nv-krishna/python-crash-course
|
d481faeb2196712cd52ca1d34dc1fe967d13712f
|
[
"Apache-2.0"
] | 2
|
2020-11-02T05:52:33.000Z
|
2021-06-09T01:28:22.000Z
|
functions/modules/import_function.py
|
nv-krishna/python-crash-course
|
d481faeb2196712cd52ca1d34dc1fe967d13712f
|
[
"Apache-2.0"
] | null | null | null |
functions/modules/import_function.py
|
nv-krishna/python-crash-course
|
d481faeb2196712cd52ca1d34dc1fe967d13712f
|
[
"Apache-2.0"
] | 2
|
2021-04-08T05:26:04.000Z
|
2021-06-09T01:28:23.000Z
|
from pizza import make_pizza
make_pizza(21,"garlic")
make_pizza(12,"cheese","sausages","vege")
| 23.75
| 41
| 0.768421
| 15
| 95
| 4.666667
| 0.666667
| 0.385714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.044944
| 0.063158
| 95
| 4
| 41
| 23.75
| 0.741573
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
98f521e9b4dabd4eb5a26e983446d261788ddd44
| 152
|
py
|
Python
|
vnpy/api/sec/__init__.py
|
black0144/vnpy
|
0d0ea30dad14a0150f7500ff9a62528030321426
|
[
"MIT"
] | 5
|
2019-01-17T12:14:14.000Z
|
2021-05-30T10:24:42.000Z
|
vnpy/api/sec/__init__.py
|
black0144/vnpy
|
0d0ea30dad14a0150f7500ff9a62528030321426
|
[
"MIT"
] | 1
|
2018-06-12T10:08:24.000Z
|
2018-06-12T10:08:24.000Z
|
vnpy/api/sec/__init__.py
|
black0144/vnpy
|
0d0ea30dad14a0150f7500ff9a62528030321426
|
[
"MIT"
] | 5
|
2019-03-26T03:17:45.000Z
|
2019-11-05T08:08:18.000Z
|
# encoding: UTF-8
from __future__ import absolute_import
from .vnsecmd import MdApi
from .vnsectd import TdApi
from . import sec_data_type as DATA_TYPE
| 25.333333
| 40
| 0.822368
| 24
| 152
| 4.875
| 0.625
| 0.136752
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007634
| 0.138158
| 152
| 6
| 40
| 25.333333
| 0.885496
| 0.098684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c735322ab15c9d6c01283a3440148977eb561f06
| 209
|
py
|
Python
|
era/tests/runner.py
|
doctorzeb8/django-era
|
6fd433ef6081c5be295df22bcea70dc2642baaa4
|
[
"MIT"
] | 1
|
2015-10-22T04:03:28.000Z
|
2015-10-22T04:03:28.000Z
|
era/tests/runner.py
|
doctorzeb8/django-era
|
6fd433ef6081c5be295df22bcea70dc2642baaa4
|
[
"MIT"
] | 3
|
2020-02-11T21:30:20.000Z
|
2021-06-10T17:24:30.000Z
|
era/tests/runner.py
|
doctorzeb8/django-era
|
6fd433ef6081c5be295df22bcea70dc2642baaa4
|
[
"MIT"
] | null | null | null |
from django.test.runner import DiscoverRunner
class NoDbDiscoverRunner(DiscoverRunner):
def setup_databases(self, **kwargs):
pass
def teardown_databases(self, *args, **kwargs):
pass
| 20.9
| 50
| 0.708134
| 22
| 209
| 6.636364
| 0.727273
| 0.178082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.200957
| 209
| 9
| 51
| 23.222222
| 0.874252
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.333333
| 0.166667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
c73972c7d0f1fc7aa70206f24683cedb2f03b965
| 119
|
py
|
Python
|
e3d/__init__.py
|
jr-garcia/Engendro3D
|
93a6a6c26be2b9a8c1520e9d83516c39532ab1ed
|
[
"MIT"
] | 8
|
2017-04-19T03:59:43.000Z
|
2020-04-29T00:29:12.000Z
|
e3d/__init__.py
|
jr-garcia/Engendro3D
|
93a6a6c26be2b9a8c1520e9d83516c39532ab1ed
|
[
"MIT"
] | null | null | null |
e3d/__init__.py
|
jr-garcia/Engendro3D
|
93a6a6c26be2b9a8c1520e9d83516c39532ab1ed
|
[
"MIT"
] | 3
|
2018-04-26T16:57:46.000Z
|
2021-03-01T05:48:06.000Z
|
from . import backends
from .Logging import logLevelsEnum
from ._engine import Engine
from .version import __version__
| 23.8
| 34
| 0.831933
| 15
| 119
| 6.266667
| 0.466667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134454
| 119
| 4
| 35
| 29.75
| 0.912621
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c744cd35f678264001469e3b4e41200953c3652d
| 48
|
py
|
Python
|
python/testData/copyPaste/ReplaceSelection.dst.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/copyPaste/ReplaceSelection.dst.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/copyPaste/ReplaceSelection.dst.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
a = 1
b = 1
<selection>c = 1
</selection><caret>
| 12
| 19
| 0.604167
| 9
| 48
| 3.222222
| 0.666667
| 0.689655
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 0.1875
| 48
| 4
| 19
| 12
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c76dd21e13ed687ed47df6315a567c2a3a6a7970
| 120
|
py
|
Python
|
pywalkgen/imu/__init__.py
|
virtual-origami/pywalkgen
|
e860b8fb327aa6ceb05849f610855657f917a1ea
|
[
"MIT"
] | null | null | null |
pywalkgen/imu/__init__.py
|
virtual-origami/pywalkgen
|
e860b8fb327aa6ceb05849f610855657f917a1ea
|
[
"MIT"
] | 1
|
2021-12-11T12:06:30.000Z
|
2021-12-11T12:14:41.000Z
|
pywalkgen/imu/__init__.py
|
virtual-origami/pywalkgen
|
e860b8fb327aa6ceb05849f610855657f917a1ea
|
[
"MIT"
] | null | null | null |
from __future__ import generator_stop
from __future__ import annotations
from .IMU import IMU
__all__ = [
'IMU'
]
| 13.333333
| 37
| 0.758333
| 15
| 120
| 5.2
| 0.533333
| 0.25641
| 0.410256
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.191667
| 120
| 8
| 38
| 15
| 0.804124
| 0
| 0
| 0
| 0
| 0
| 0.025
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
c796826103393b999fe378306285e25700607481
| 128
|
py
|
Python
|
hm_duration/src/__init__.py
|
GRV96/hm_duration
|
89de54a114cab42862dbe6b6dd5b2180adf2ee0d
|
[
"MIT"
] | null | null | null |
hm_duration/src/__init__.py
|
GRV96/hm_duration
|
89de54a114cab42862dbe6b6dd5b2180adf2ee0d
|
[
"MIT"
] | null | null | null |
hm_duration/src/__init__.py
|
GRV96/hm_duration
|
89de54a114cab42862dbe6b6dd5b2180adf2ee0d
|
[
"MIT"
] | null | null | null |
from .duration_string import\
DURATION_STR_PATTERN, duration_to_str, str_repr_duration
from .hm_duration import HM_Duration
| 32
| 60
| 0.851563
| 19
| 128
| 5.263158
| 0.473684
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109375
| 128
| 3
| 61
| 42.666667
| 0.877193
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c7bb68f30f57826c76e108e07f89ea1dd499904a
| 2,094
|
py
|
Python
|
hipyelp/migrations/0020_auto_20210426_0027.py
|
buggydev1/Hip_yelp_Back
|
f80407d898d267465e5cfbf7f73905b776e91b43
|
[
"MIT"
] | null | null | null |
hipyelp/migrations/0020_auto_20210426_0027.py
|
buggydev1/Hip_yelp_Back
|
f80407d898d267465e5cfbf7f73905b776e91b43
|
[
"MIT"
] | null | null | null |
hipyelp/migrations/0020_auto_20210426_0027.py
|
buggydev1/Hip_yelp_Back
|
f80407d898d267465e5cfbf7f73905b776e91b43
|
[
"MIT"
] | 2
|
2021-04-22T14:42:57.000Z
|
2021-05-18T01:11:58.000Z
|
# Generated by Django 3.2 on 2021-04-26 00:27
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hipyelp', '0019_alter_drinktag_tagname'),
]
operations = [
migrations.RemoveField(
model_name='foodtag',
name='tags',
),
migrations.AddField(
model_name='foodtag',
name='tagname',
field=models.CharField(default='', max_length=20),
preserve_default=False,
),
migrations.AlterField(
model_name='drink',
name='group',
field=models.CharField(max_length=100),
),
migrations.AlterField(
model_name='drink',
name='lat',
field=models.CharField(max_length=20),
),
migrations.AlterField(
model_name='drink',
name='lon',
field=models.CharField(max_length=20),
),
migrations.AlterField(
model_name='drink',
name='photo_url',
field=models.CharField(max_length=200),
),
migrations.AlterField(
model_name='drink',
name='tags',
field=models.CharField(max_length=20),
),
migrations.AlterField(
model_name='food',
name='group',
field=models.CharField(max_length=100),
),
migrations.AlterField(
model_name='food',
name='lat',
field=models.CharField(max_length=20),
),
migrations.AlterField(
model_name='food',
name='lon',
field=models.CharField(max_length=20),
),
migrations.AlterField(
model_name='food',
name='photo_url',
field=models.CharField(default='l', max_length=200),
preserve_default=False,
),
migrations.AlterField(
model_name='food',
name='tags',
field=models.CharField(max_length=20),
),
]
| 27.92
| 64
| 0.522445
| 189
| 2,094
| 5.62963
| 0.26455
| 0.101504
| 0.206767
| 0.272556
| 0.714286
| 0.705827
| 0.604323
| 0.512218
| 0.468045
| 0.468045
| 0
| 0.032811
| 0.359599
| 2,094
| 74
| 65
| 28.297297
| 0.760626
| 0.020535
| 0
| 0.808824
| 1
| 0
| 0.074671
| 0.013177
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.014706
| 0
| 0.058824
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c7c848fea561a4a8e56a1a1a27db6417e17e0d67
| 54
|
py
|
Python
|
noisyopt/__init__.py
|
glassnotes/noisyopt
|
625b874918ac5926c3bf05d869e914cbddfffe20
|
[
"MIT"
] | 70
|
2016-05-10T14:27:37.000Z
|
2022-03-10T08:28:34.000Z
|
noisyopt/__init__.py
|
glassnotes/noisyopt
|
625b874918ac5926c3bf05d869e914cbddfffe20
|
[
"MIT"
] | 18
|
2016-05-18T16:15:49.000Z
|
2021-07-29T18:55:13.000Z
|
noisyopt/__init__.py
|
glassnotes/noisyopt
|
625b874918ac5926c3bf05d869e914cbddfffe20
|
[
"MIT"
] | 16
|
2016-05-18T15:51:34.000Z
|
2022-01-17T23:43:46.000Z
|
from .main import *
from .version import __version__
| 18
| 32
| 0.777778
| 7
| 54
| 5.428571
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 54
| 2
| 33
| 27
| 0.844444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c7e7b72d217e8f38b3ece14595546193a3908f41
| 166
|
py
|
Python
|
datadog_checks_base/datadog_checks/checks/win/winpdh_stub.py
|
glasser/integrations-core
|
1dd515d49b1690a1369ee5195713605b1b072b1f
|
[
"BSD-3-Clause"
] | 2
|
2019-05-28T03:48:29.000Z
|
2019-07-05T07:05:58.000Z
|
datadog_checks_base/datadog_checks/checks/win/winpdh_stub.py
|
glasser/integrations-core
|
1dd515d49b1690a1369ee5195713605b1b072b1f
|
[
"BSD-3-Clause"
] | 4
|
2019-07-03T02:53:19.000Z
|
2019-07-10T14:52:14.000Z
|
datadog_checks_base/datadog_checks/checks/win/winpdh_stub.py
|
glasser/integrations-core
|
1dd515d49b1690a1369ee5195713605b1b072b1f
|
[
"BSD-3-Clause"
] | 1
|
2019-12-23T13:35:17.000Z
|
2019-12-23T13:35:17.000Z
|
# (C) Datadog, Inc. 2013-2016
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
# flake8: noqa
from ...base.checks.win.winpdh_stub import *
| 27.666667
| 53
| 0.740964
| 24
| 166
| 5.083333
| 0.958333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06338
| 0.144578
| 166
| 5
| 54
| 33.2
| 0.795775
| 0.674699
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c7f4b9b59d15e7684c139b1984362a1dfa8b11a0
| 44
|
py
|
Python
|
test/run/t198.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
test/run/t198.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
test/run/t198.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
for i in (1 for x in range(3)):
print i
| 14.666667
| 31
| 0.568182
| 11
| 44
| 2.272727
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 0.318182
| 44
| 2
| 32
| 22
| 0.766667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
40291e047baa82298b6dd0733c2ec67ff1c56fee
| 12,212
|
py
|
Python
|
tests/test_crn_parser.py
|
DNA-and-Natural-Algorithms-Group/crnverifier
|
c78b1165bd4fdfc3972cf943b9e8e1b1c9ee42fe
|
[
"MIT"
] | null | null | null |
tests/test_crn_parser.py
|
DNA-and-Natural-Algorithms-Group/crnverifier
|
c78b1165bd4fdfc3972cf943b9e8e1b1c9ee42fe
|
[
"MIT"
] | null | null | null |
tests/test_crn_parser.py
|
DNA-and-Natural-Algorithms-Group/crnverifier
|
c78b1165bd4fdfc3972cf943b9e8e1b1c9ee42fe
|
[
"MIT"
] | null | null | null |
#
# tests/test_crn_parser.py
#
# Written by Stefan Badelt (bad-ants-fleet@posteo.eu).
#
import unittest
from pyparsing import ParseException
from crnverifier.crn_parser import parse_crn_string, CRNParseError
class TestCRNparser(unittest.TestCase):
def test_concentration_specs(self):
input_string = """
# Default concentrations
A @ initial 50
B @ constant 50
C @ constant 10
# Reactions
A + B -> C
"""
output_unprocessed = [['concentration', ['A'], ['initial'], ['50']],
['concentration', ['B'], ['constant'], ['50']],
['concentration', ['C'], ['constant'], ['10']],
['irreversible', [['A'], ['B']], [['C']]]]
self.assertEqual(parse_crn_string(input_string, process=False), output_unprocessed)
output_processed1 = [[['A', 'B'], ['C'], [1]]]
output_processed2 = {'A' : ('initial', 50),
'B' : ('constant', 50),
'C' : ('constant', 10)}
o1, o2 = parse_crn_string(input_string)
self.assertEqual(o1, output_processed1)
self.assertEqual(o2, output_processed2)
input_string = "A@i50; B@c20; A + B -> C [k = 77]; <=> C [kf = 18, kr = 77]"
output_unprocessed = [['concentration', ['A'], ['i'], ['50']],
['concentration', ['B'], ['c'], ['20']],
['irreversible', [['A'], ['B']], [['C']], ['77']],
['reversible', [], [['C']], ['18', '77']] ]
self.assertEqual(parse_crn_string(input_string, process=False), output_unprocessed)
output_processed1 = [[['A', 'B'], ['C'], [77]],
[[],['C'],[18, 77]]]
output_processed2 = {'A' : ('initial', 50),
'B' : ('constant', 20),
'C' : ('initial', 0)}
o1, o2 = parse_crn_string(input_string)
self.assertEqual(o1, output_processed1)
self.assertEqual(o2, output_processed2)
def test_duplicate_concentration_error(self):
input_string = "A@i50; A@c20; A + B -> C [k = 77]; <=> C [kf = 18, kr = 77]"
with self.assertRaises(CRNParseError):
parse_crn_string(input_string)
def test_float_examples(self):
input_string = """
# Comment
A + B -> C [k = 0.32]
"""
output_unprocessed = [['irreversible', [['A'], ['B']], [['C']], ['0.32']]]
output_processed1 = [[['A', 'B'], ['C'], [0.32]]]
output_processed2 = {'A' : ('initial', 0),
'B' : ('initial', 0),
'C' : ('initial', 0)}
assert parse_crn_string(input_string, process = False) == output_unprocessed
assert parse_crn_string(input_string) == (output_processed1, output_processed2)
def test_parse_examples(self):
input_string = """
# Comment
A + B -> C
"""
output_unprocessed = [['irreversible', [['A'], ['B']], [['C']]]]
output_processed1 = [[['A', 'B'], ['C'], [1]]]
output_processed2 = {'A' : ('initial', 0),
'B' : ('initial', 0),
'C' : ('initial', 0)}
output_processed3 = [[['A', 'B'], ['C'], [1]]]
output_processed4 = {'A' : (None, None),
'B' : (None, None),
'C' : (None, None)}
self.assertEqual(parse_crn_string(input_string, process = False),
output_unprocessed, 'parse reaction unprocessed 1')
o1, o2 = parse_crn_string(input_string)
self.assertEqual(o1, output_processed1)
self.assertEqual(o2, output_processed2)
o3, o4 = parse_crn_string(input_string, process = True,
defaultrate = 1,
defaultmode = None,
defaultconc = None)
self.assertEqual(o3, output_processed3)
self.assertEqual(o4, output_processed4)
input_string = """A + B -> C [k = 10.8]"""
output_unprocessed = [['irreversible', [['A'], ['B']], [['C']], ['10.8']]]
output_processed1 = [[['A', 'B'], ['C'], [10.8]]]
self.assertEqual(parse_crn_string(input_string, process = False),
output_unprocessed, 'parse reaction unprocessed 2')
o1, o2 = parse_crn_string(input_string)
self.assertEqual(o1, output_processed1)
self.assertEqual(o2, output_processed2)
input_string = """A + B -> C [k = 8]"""
output_unprocessed = [['irreversible', [['A'], ['B']], [['C']], ['8']]]
output_processed1 = [[['A', 'B'], ['C'], [8]]]
self.assertEqual(parse_crn_string(input_string, process=False),
output_unprocessed, 'parse reaction unprocessed 3')
o1, o2 = parse_crn_string(input_string)
self.assertEqual(o1, output_processed1)
self.assertEqual(o2, output_processed2)
with self.assertRaises(ParseException):
# Only one rate specified for reversible reaction
parse_crn_string("A <=> C [k=14]")
def test_duplicate_input(self):
input_string = """ # Allowing duplicate specification!
A + B -> C
A + B <=> C
"""
output_unprocessed = [['irreversible', [['A'], ['B']], [['C']]],
['reversible', [['A'], ['B']], [['C']]]]
output_processed1 = [[['A', 'B'], ['C'], [1]],
[['A', 'B'], ['C'], [1, 1]]]
output_processed2 = {'A' : ('initial', 0),
'B' : ('initial', 0),
'C' : ('initial', 0)}
self.assertEqual(parse_crn_string(input_string, process=False),
output_unprocessed,
'parse reaction unprocessed 4')
o1, o2 = parse_crn_string(input_string)
self.assertEqual(o1, output_processed1)
self.assertEqual(o2, output_processed2)
input_string = """
# Comment
A + B -> C [k = 18]
A + B <=> C [kf = 99, kr = 77]
"""
output_unprocessed = [['irreversible', [['A'], ['B']], [['C']], ['18']],
['reversible', [['A'], ['B']], [['C']], ['99', '77']]]
output_processed1 = [[['A', 'B'], ['C'], [18]],
[['A', 'B'], ['C'], [99, 77]]]
self.assertEqual(parse_crn_string(input_string, process=False),
output_unprocessed,
'parse reaction unprocessed 5')
o1, o2 = parse_crn_string(input_string)
self.assertEqual(o1, output_processed1)
input_string = """
# Comment
A + B -> C [18]
A + B <=> C [99, 77]
"""
output_unprocessed = [['irreversible', [['A'], ['B']], [['C']], ['18']],
['reversible', [['A'], ['B']], [['C']], ['99', '77']]]
output_processed1 = [[['A', 'B'], ['C'], [18]],
[['A', 'B'], ['C'], [99, 77]]]
self.assertEqual(parse_crn_string(input_string, process=False),
output_unprocessed,
'parse reaction unprocessed 5')
o1, o2 = parse_crn_string(input_string)
self.assertEqual(o1, output_processed1)
input_string = "A + B -> C + X [0.48]; X + A <=> C + L [0.89, 0.87]"
exp1 = [[['A', 'B'], ['C', 'X'], [0.48]], [['X', 'A'], ['C', 'L'], [0.89, 0.87]]]
exp2 = {'A': ('initial', 0), 'B': ('initial', 0), 'C': ('initial', 0), 'X': ('initial', 0), 'L': ('initial', 0)}
o1, o2 = parse_crn_string(input_string)
self.assertEqual(o1, exp1)
def test_multiple_species(self):
input_string = """5A -> 3B + 2C"""
output_processed1 = [[['A', 'A', 'A', 'A', 'A'],
['B', 'B', 'B', 'C', 'C'], [1]]]
output_processed2 = {'A' : ('initial', 0),
'B' : ('initial', 0),
'C' : ('initial', 0)}
o1, o2 = parse_crn_string(input_string)
self.assertEqual(o1, output_processed1)
self.assertEqual(o2, output_processed2)
input_string = """
A + B -> C [k = 77]; <=> C [kf = 18, kr = 77]
X + 2Y <=> Z
"""
output_processed1 = [[['A', 'B'], ['C'], [77]], [[], ['C'], [18, 77]],
[['X', 'Y', 'Y'], ['Z'], [1, 1]]]
o1, o2 = parse_crn_string(input_string)
self.assertEqual(o1, output_processed1)
input_string = """2Na + Cl2 <=> 2NaCl"""
output_processed1 = [[['Na', 'Na', 'Cl2'], ['NaCl', 'NaCl'], [1, 1]]]
output_processed2 = {'Na' : ('initial', 0),
'Cl2' : ('initial', 0),
'NaCl' : ('initial', 0)}
o1, o2 = parse_crn_string(input_string)
self.assertEqual(o1, output_processed1)
self.assertEqual(o2, output_processed2)
input_string = """N2 + 3H2 <=> 2NH3"""
output_processed1 = [[['N2', 'H2', 'H2', 'H2'], ['NH3', 'NH3'], [1, 1]]]
output_processed2 = {'N2' : ('initial', 0),
'H2' : ('initial', 0),
'NH3' : ('initial', 0)}
o1, o2 = parse_crn_string(input_string)
self.assertEqual(o1, output_processed1)
self.assertEqual(o2, output_processed2)
def test_crn_modules(self):
input_string = """
# Default concentrations
A @ initial 50
B @ constant 50
C @ constant 10
# Reactions
A + B -> C
"""
output_unprocessed = [[['concentration', ['A'], ['initial'], ['50']]],
[['concentration', ['B'], ['constant'], ['50']]],
[['concentration', ['C'], ['constant'], ['10']]],
[['irreversible', [['A'], ['B']], [['C']]]]]
self.assertEqual(parse_crn_string(input_string, process = False, modular = True), output_unprocessed)
output_processed1 = [[[['A', 'B'], ['C'], [1]]]]
output_processed2 = {'A' : ('initial', 50),
'B' : ('constant', 50),
'C' : ('constant', 10)}
o1, o2 = parse_crn_string(input_string, modular = True)
self.assertEqual(o1, output_processed1)
self.assertEqual(o2, output_processed2)
input_string = """
# Default concentrations
A @ initial 50
B @ constant 50
C @ constant 10
# Reactions
A + B -> C [1e-3]
D + C <=> 2C
A + B -> C; F + C -> B
"""
output_unprocessed = [[['concentration', ['A'], ['initial'], ['50']]],
[['concentration', ['B'], ['constant'], ['50']]],
[['concentration', ['C'], ['constant'], ['10']]],
[['irreversible', [['A'], ['B']], [['C']], ['1e-3']]],
[['reversible', [['D'], ['C']], [['2', 'C']]]],
[['irreversible', [['A'], ['B']], [['C']]],
['irreversible', [['F'], ['C']], [['B']]]]]
self.assertEqual(parse_crn_string(input_string, process = False, modular = True), output_unprocessed)
output_processed1 = [[[['A', 'B'], ['C'], [1e-3]]],
[[['D', 'C'], ['C', 'C'], [1, 1]]],
[[['A', 'B'], ['C'], [1]],
[['F', 'C'], ['B'], [1]]]]
output_processed2 = {'A' : ('initial', 50),
'B' : ('constant', 50),
'C' : ('constant', 10),
'D' : ('initial', 0),
'F' : ('initial', 0)}
o1, o2 = parse_crn_string(input_string, modular = True)
self.assertEqual(o1, output_processed1)
self.assertEqual(o2, output_processed2)
if __name__ == '__main__':
unittest.main()
| 43.304965
| 120
| 0.454225
| 1,195
| 12,212
| 4.464435
| 0.10795
| 0.019869
| 0.028679
| 0.10328
| 0.780319
| 0.768885
| 0.73552
| 0.712465
| 0.700656
| 0.658482
| 0
| 0.04846
| 0.35113
| 12,212
| 281
| 121
| 43.459075
| 0.624811
| 0.010236
| 0
| 0.563559
| 0
| 0.016949
| 0.196523
| 0
| 0
| 0
| 0
| 0
| 0.177966
| 1
| 0.029661
| false
| 0
| 0.012712
| 0
| 0.04661
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4036c6c9a11088924e412f4ea1d5e301efcee1e1
| 217
|
py
|
Python
|
train/__init__.py
|
fhvilshoj/ECINN
|
8c0b8097a5791d3d2697a11332cd21f34b4c76e4
|
[
"MIT"
] | 3
|
2021-12-25T13:10:33.000Z
|
2022-01-24T21:40:40.000Z
|
train/__init__.py
|
fhvilshoj/ECINN
|
8c0b8097a5791d3d2697a11332cd21f34b4c76e4
|
[
"MIT"
] | null | null | null |
train/__init__.py
|
fhvilshoj/ECINN
|
8c0b8097a5791d3d2697a11332cd21f34b4c76e4
|
[
"MIT"
] | 1
|
2021-12-03T17:47:19.000Z
|
2021-12-03T17:47:19.000Z
|
from .train import train
def add_arguments(parser): pass
def needs_config(): return True
def needs_model(): return True
def needs_data(): return True
def fn(args, cfg, model, data):
train(cfg, model, data)
| 19.727273
| 32
| 0.718894
| 34
| 217
| 4.470588
| 0.5
| 0.157895
| 0.256579
| 0.236842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175115
| 217
| 10
| 33
| 21.7
| 0.849162
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.714286
| false
| 0.142857
| 0.142857
| 0.428571
| 0.857143
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
|
0
| 5
|
40505e6bdf04df79bac24f226c2c3e7237aa7c49
| 149
|
py
|
Python
|
nick_derobertis_site/common/cli/templates/component/{{ cookiecutter.snake_case_name }}/{{ cookiecutter.snake_case_name }}_model.py
|
nickderobertis/nick-derobertis-site
|
386061dc258921eed41f2d3965ef69e02adde7ba
|
[
"MIT"
] | 1
|
2022-03-31T10:55:40.000Z
|
2022-03-31T10:55:40.000Z
|
nick_derobertis_site/common/cli/templates/component/{{ cookiecutter.snake_case_name }}/{{ cookiecutter.snake_case_name }}_model.py
|
nickderobertis/nick-derobertis-site
|
386061dc258921eed41f2d3965ef69e02adde7ba
|
[
"MIT"
] | 8
|
2020-08-28T11:44:37.000Z
|
2020-08-31T09:19:19.000Z
|
nick_derobertis_site/common/cli/templates/component/{{ cookiecutter.snake_case_name }}/{{ cookiecutter.snake_case_name }}_model.py
|
nickderobertis/nick-derobertis-site
|
386061dc258921eed41f2d3965ef69e02adde7ba
|
[
"MIT"
] | null | null | null |
import param
from nick_derobertis_site.common.model import ComponentModel
class {{ cookiecutter.pascal_case_name }}Model(ComponentModel):
pass
| 21.285714
| 63
| 0.818792
| 18
| 149
| 6.555556
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114094
| 149
| 7
| 64
| 21.285714
| 0.893939
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.25
| 0.5
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
4055f1745062c390a0e3a7bc2c86c2f521d324cf
| 31
|
py
|
Python
|
torch/__main__.py
|
sarathcakurathi/TOrch
|
345e63bdb9d21605c9d72ecdc4b0d99d6f9285ac
|
[
"MIT"
] | null | null | null |
torch/__main__.py
|
sarathcakurathi/TOrch
|
345e63bdb9d21605c9d72ecdc4b0d99d6f9285ac
|
[
"MIT"
] | null | null | null |
torch/__main__.py
|
sarathcakurathi/TOrch
|
345e63bdb9d21605c9d72ecdc4b0d99d6f9285ac
|
[
"MIT"
] | 1
|
2020-11-04T18:30:28.000Z
|
2020-11-04T18:30:28.000Z
|
from .torch import main
main()
| 10.333333
| 23
| 0.741935
| 5
| 31
| 4.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 31
| 3
| 24
| 10.333333
| 0.884615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4069134aab4cebdfc76b57858ca6fb798f629e90
| 591
|
py
|
Python
|
coremltools/converters/nnssa/coreml/graph_pass/__init__.py
|
mrfarhadi/coremltools
|
29e6ec3868fbfaf163c2133b1e7d1b5ce688add1
|
[
"BSD-3-Clause"
] | null | null | null |
coremltools/converters/nnssa/coreml/graph_pass/__init__.py
|
mrfarhadi/coremltools
|
29e6ec3868fbfaf163c2133b1e7d1b5ce688add1
|
[
"BSD-3-Clause"
] | 1
|
2019-09-17T17:04:39.000Z
|
2019-09-17T17:04:39.000Z
|
coremltools/converters/nnssa/coreml/graph_pass/__init__.py
|
mrfarhadi/coremltools
|
29e6ec3868fbfaf163c2133b1e7d1b5ce688add1
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import print_function as _
from __future__ import division as _
from __future__ import absolute_import as _
from .op_removals import remove_no_ops_and_shift_control_dependencies
from .op_removals import constant_weight_link_removal
from .op_removals import remove_single_isolated_node
from .op_removals import remove_identity
from .op_fusions import fuse_bias_add, transform_nhwc_to_nchw, \
onehot_matmul_to_embedding, fuse_layer_norm, fuse_gelu, \
fuse_conv_mul_add_into_batchnorm
from .mlmodel_passes import remove_disconnected_constants
| 36.9375
| 69
| 0.852792
| 86
| 591
| 5.232558
| 0.581395
| 0.066667
| 0.124444
| 0.177778
| 0.173333
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001901
| 0.109983
| 591
| 15
| 70
| 39.4
| 0.853612
| 0.035533
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.090909
| 0.818182
| 0
| 0.818182
| 0.090909
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
407a3cc4ae2f234c23107887a9e637c303435f75
| 108
|
py
|
Python
|
src/respeaker_ros/__init__.py
|
nickswalker/respeaker_ros
|
5384ae0cce91db0012eaa168b2f1a0acbfdafd56
|
[
"Apache-2.0"
] | null | null | null |
src/respeaker_ros/__init__.py
|
nickswalker/respeaker_ros
|
5384ae0cce91db0012eaa168b2f1a0acbfdafd56
|
[
"Apache-2.0"
] | null | null | null |
src/respeaker_ros/__init__.py
|
nickswalker/respeaker_ros
|
5384ae0cce91db0012eaa168b2f1a0acbfdafd56
|
[
"Apache-2.0"
] | null | null | null |
from .audio import RespeakerAudio
from .interface import RespeakerInterface
from .node import RespeakerNode
| 27
| 41
| 0.861111
| 12
| 108
| 7.75
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 108
| 3
| 42
| 36
| 0.96875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
407cfa2e2fbee5d9ce6f6c44294fe279ab674339
| 133
|
py
|
Python
|
var_stats_qc/admin.py
|
brand-fabian/varfish-server
|
6a084d891d676ff29355e72a29d4f7b207220283
|
[
"MIT"
] | 14
|
2019-09-30T12:44:17.000Z
|
2022-02-04T14:45:16.000Z
|
var_stats_qc/admin.py
|
brand-fabian/varfish-server
|
6a084d891d676ff29355e72a29d4f7b207220283
|
[
"MIT"
] | 244
|
2021-03-26T15:13:15.000Z
|
2022-03-31T15:48:04.000Z
|
var_stats_qc/admin.py
|
brand-fabian/varfish-server
|
6a084d891d676ff29355e72a29d4f7b207220283
|
[
"MIT"
] | 8
|
2020-05-19T21:55:13.000Z
|
2022-03-31T07:02:58.000Z
|
from django.contrib import admin
from .models import ReferenceSite
# Register your models here.
admin.site.register(ReferenceSite)
| 19
| 34
| 0.819549
| 17
| 133
| 6.411765
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120301
| 133
| 6
| 35
| 22.166667
| 0.931624
| 0.195489
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
409c510ee4487985f3f7a0cfbcbab3b3761c3d9e
| 43,467
|
py
|
Python
|
plugins/modules/oci_devops_build_pipeline_stage.py
|
slmjy/oci-ansible-collection
|
349c91e2868bf4706a6e3d6fb3b47fc622bfe11b
|
[
"Apache-2.0"
] | 108
|
2020-05-19T20:46:10.000Z
|
2022-03-25T14:10:01.000Z
|
plugins/modules/oci_devops_build_pipeline_stage.py
|
slmjy/oci-ansible-collection
|
349c91e2868bf4706a6e3d6fb3b47fc622bfe11b
|
[
"Apache-2.0"
] | 90
|
2020-06-14T22:07:11.000Z
|
2022-03-07T05:40:29.000Z
|
plugins/modules/oci_devops_build_pipeline_stage.py
|
slmjy/oci-ansible-collection
|
349c91e2868bf4706a6e3d6fb3b47fc622bfe11b
|
[
"Apache-2.0"
] | 42
|
2020-08-30T23:09:12.000Z
|
2022-03-25T16:58:01.000Z
|
#!/usr/bin/python
# Copyright (c) 2020, 2021 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
# Python 2/3 compatibility: make module-level classes new-style under Python 2.
__metaclass__ = type
# Ansible metadata describing the maturity and support channel of this module.
ANSIBLE_METADATA = {
    "metadata_version": "1.1",
    "status": ["preview"],
    "supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_devops_build_pipeline_stage
short_description: Manage a BuildPipelineStage resource in Oracle Cloud Infrastructure
description:
- This module allows the user to create, update and delete a BuildPipelineStage resource in Oracle Cloud Infrastructure
- For I(state=present), creates a new stage.
version_added: "2.9.0"
author: Oracle (@oracle)
options:
display_name:
description:
- Stage display name, which can be renamed and is not necessarily unique. Avoid entering confidential information.
- Required for create, update, delete when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is set.
- This parameter is updatable when C(OCI_USE_NAME_AS_IDENTIFIER) is not set.
type: str
aliases: ["name"]
description:
description:
- Optional description about the stage.
- This parameter is updatable.
type: str
build_pipeline_stage_type:
description:
- "Defines the stage type, which is one of the following: Build, Deliver Artifacts, Wait, and Trigger Deployment."
- Required for create using I(state=present), update using I(state=present) with build_pipeline_stage_id present.
type: str
choices:
- "DELIVER_ARTIFACT"
- "TRIGGER_DEPLOYMENT_PIPELINE"
- "WAIT"
- "BUILD"
build_pipeline_id:
description:
- The OCID of the build pipeline.
- Required for create using I(state=present).
type: str
build_pipeline_stage_predecessor_collection:
description:
- ""
- Required for create using I(state=present).
- This parameter is updatable.
- Applicable when build_pipeline_stage_type is one of ['DELIVER_ARTIFACT', 'BUILD', 'TRIGGER_DEPLOYMENT_PIPELINE', 'WAIT']
type: dict
suboptions:
items:
description:
- A list of build pipeline stage predecessors for a stage.
- Required when build_pipeline_stage_type is 'DELIVER_ARTIFACT'
type: list
elements: dict
required: true
suboptions:
id:
description:
- The ID of the predecessor stage. If a stage is the first stage in the pipeline, then the ID is the pipeline's ID.
- Required when build_pipeline_stage_type is 'DELIVER_ARTIFACT'
type: str
required: true
freeform_tags:
description:
- "Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. See L(Resource
Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm). Example: `{\\"bar-key\\": \\"value\\"}`"
- This parameter is updatable.
type: dict
defined_tags:
description:
- "Defined tags for this resource. Each key is predefined and scoped to a namespace. See L(Resource
Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm). Example: `{\\"foo-namespace\\": {\\"bar-key\\": \\"value\\"}}`"
- This parameter is updatable.
type: dict
deliver_artifact_collection:
description:
- ""
- This parameter is updatable.
- Applicable when build_pipeline_stage_type is 'DELIVER_ARTIFACT'
- Required when build_pipeline_stage_type is 'DELIVER_ARTIFACT'
type: dict
suboptions:
items:
description:
- Collection of artifacts that were generated in the Build stage and need to be pushed to the artifactory stores. In case of UPDATE
operation, replaces existing artifacts list. Merging with existing artifacts is not supported.
- Required when build_pipeline_stage_type is 'DELIVER_ARTIFACT'
type: list
elements: dict
required: true
suboptions:
artifact_name:
description:
- Name of the artifact specified in the build_spec.yaml file.
- Required when build_pipeline_stage_type is 'DELIVER_ARTIFACT'
type: str
required: true
artifact_id:
description:
- Artifact identifier that contains the artifact definition.
- Required when build_pipeline_stage_type is 'DELIVER_ARTIFACT'
type: str
required: true
deploy_pipeline_id:
description:
- A target deployment pipeline OCID that will run in this stage.
- This parameter is updatable.
- Applicable when build_pipeline_stage_type is 'TRIGGER_DEPLOYMENT_PIPELINE'
- Required when build_pipeline_stage_type is 'TRIGGER_DEPLOYMENT_PIPELINE'
type: str
is_pass_all_parameters_enabled:
description:
- A boolean flag that specifies whether all the parameters must be passed when the deployment is triggered.
- This parameter is updatable.
- Applicable when build_pipeline_stage_type is 'TRIGGER_DEPLOYMENT_PIPELINE'
- Required when build_pipeline_stage_type is 'TRIGGER_DEPLOYMENT_PIPELINE'
type: bool
wait_criteria:
description:
- ""
- This parameter is updatable.
- Applicable when build_pipeline_stage_type is 'WAIT'
- Required when build_pipeline_stage_type is 'WAIT'
type: dict
suboptions:
wait_type:
description:
- Wait criteria type.
- This parameter is updatable.
type: str
choices:
- "ABSOLUTE_WAIT"
required: true
wait_duration:
description:
- The absolute wait duration.
Minimum wait duration must be 5 seconds.
Maximum wait duration can be up to 2 days.
- This parameter is updatable.
- Applicable when wait_type is 'ABSOLUTE_WAIT'
type: str
image:
description:
- Image name for the build environment
- This parameter is updatable.
- Applicable when build_pipeline_stage_type is 'BUILD'
- Required when build_pipeline_stage_type is 'BUILD'
type: str
build_spec_file:
description:
- The path to the build specification file for this environment. The default location of the file if not specified is build_spec.yaml.
- This parameter is updatable.
- Applicable when build_pipeline_stage_type is 'BUILD'
type: str
stage_execution_timeout_in_seconds:
description:
- Timeout for the build stage execution. Specify value in seconds.
- This parameter is updatable.
- Applicable when build_pipeline_stage_type is 'BUILD'
type: int
build_source_collection:
description:
- ""
- This parameter is updatable.
- Applicable when build_pipeline_stage_type is 'BUILD'
- Required when build_pipeline_stage_type is 'BUILD'
type: dict
suboptions:
items:
description:
- Collection of build sources. In case of UPDATE operation, replaces existing build sources list. Merging with existing build sources is not
supported.
- Required when build_pipeline_stage_type is 'BUILD'
type: list
elements: dict
required: true
suboptions:
name:
description:
- Name of the build source. This must be unique within a build source collection. The name can be used by customers to locate the
working directory pertinent to this repository.
type: str
required: true
connection_type:
description:
- The type of source provider.
type: str
choices:
- "GITHUB"
- "DEVOPS_CODE_REPOSITORY"
- "GITLAB"
required: true
repository_url:
description:
- URL for the repository.
type: str
required: true
branch:
description:
- Branch name.
type: str
required: true
connection_id:
description:
- Connection identifier pertinent to GitHub source provider.
- Required when connection_type is one of ['GITHUB', 'GITLAB']
type: str
repository_id:
description:
- The DevOps code repository ID.
- Required when connection_type is 'DEVOPS_CODE_REPOSITORY'
type: str
primary_build_source:
description:
- Name of the build source where the build_spec.yml file is located. If not specified, the first entry in the build source collection is chosen as
primary build source.
- This parameter is updatable.
- Applicable when build_pipeline_stage_type is 'BUILD'
type: str
build_pipeline_stage_id:
description:
- Unique stage identifier.
- Required for update using I(state=present) when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is not set.
- Required for delete using I(state=absent) when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is not set.
type: str
aliases: ["id"]
state:
description:
- The state of the BuildPipelineStage.
- Use I(state=present) to create or update a BuildPipelineStage.
- Use I(state=absent) to delete a BuildPipelineStage.
type: str
required: false
default: 'present'
choices: ["present", "absent"]
extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_creatable_resource, oracle.oci.oracle_wait_options ]
"""
EXAMPLES = """
- name: Create build_pipeline_stage with build_pipeline_stage_type = DELIVER_ARTIFACT
oci_devops_build_pipeline_stage:
# required
build_pipeline_stage_type: DELIVER_ARTIFACT
build_pipeline_id: "ocid1.buildpipeline.oc1..xxxxxxEXAMPLExxxxxx"
# optional
display_name: display_name_example
description: description_example
build_pipeline_stage_predecessor_collection:
# required
items:
- # required
id: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
freeform_tags: {'Department': 'Finance'}
defined_tags: {'Operations': {'CostCenter': 'US'}}
deliver_artifact_collection:
# required
items:
- # required
artifact_name: artifact_name_example
artifact_id: "ocid1.artifact.oc1..xxxxxxEXAMPLExxxxxx"
- name: Create build_pipeline_stage with build_pipeline_stage_type = TRIGGER_DEPLOYMENT_PIPELINE
oci_devops_build_pipeline_stage:
# required
build_pipeline_stage_type: TRIGGER_DEPLOYMENT_PIPELINE
build_pipeline_id: "ocid1.buildpipeline.oc1..xxxxxxEXAMPLExxxxxx"
# optional
display_name: display_name_example
description: description_example
build_pipeline_stage_predecessor_collection:
# required
items:
- # required
id: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
freeform_tags: {'Department': 'Finance'}
defined_tags: {'Operations': {'CostCenter': 'US'}}
deploy_pipeline_id: "ocid1.deploypipeline.oc1..xxxxxxEXAMPLExxxxxx"
is_pass_all_parameters_enabled: true
- name: Create build_pipeline_stage with build_pipeline_stage_type = WAIT
oci_devops_build_pipeline_stage:
# required
build_pipeline_stage_type: WAIT
build_pipeline_id: "ocid1.buildpipeline.oc1..xxxxxxEXAMPLExxxxxx"
# optional
display_name: display_name_example
description: description_example
build_pipeline_stage_predecessor_collection:
# required
items:
- # required
id: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
freeform_tags: {'Department': 'Finance'}
defined_tags: {'Operations': {'CostCenter': 'US'}}
wait_criteria:
# required
wait_type: ABSOLUTE_WAIT
wait_duration: wait_duration_example
- name: Create build_pipeline_stage with build_pipeline_stage_type = BUILD
oci_devops_build_pipeline_stage:
# required
build_pipeline_stage_type: BUILD
build_pipeline_id: "ocid1.buildpipeline.oc1..xxxxxxEXAMPLExxxxxx"
# optional
display_name: display_name_example
description: description_example
build_pipeline_stage_predecessor_collection:
# required
items:
- # required
id: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
freeform_tags: {'Department': 'Finance'}
defined_tags: {'Operations': {'CostCenter': 'US'}}
image: image_example
build_spec_file: build_spec_file_example
stage_execution_timeout_in_seconds: 56
build_source_collection:
# required
items:
- # required
name: name_example
connection_type: GITHUB
repository_url: repository_url_example
branch: branch_example
connection_id: "ocid1.connection.oc1..xxxxxxEXAMPLExxxxxx"
primary_build_source: primary_build_source_example
- name: Update build_pipeline_stage with build_pipeline_stage_type = DELIVER_ARTIFACT
oci_devops_build_pipeline_stage:
# required
build_pipeline_stage_type: DELIVER_ARTIFACT
# optional
display_name: display_name_example
description: description_example
build_pipeline_stage_predecessor_collection:
# required
items:
- # required
id: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
freeform_tags: {'Department': 'Finance'}
defined_tags: {'Operations': {'CostCenter': 'US'}}
deliver_artifact_collection:
# required
items:
- # required
artifact_name: artifact_name_example
artifact_id: "ocid1.artifact.oc1..xxxxxxEXAMPLExxxxxx"
- name: Update build_pipeline_stage with build_pipeline_stage_type = TRIGGER_DEPLOYMENT_PIPELINE
oci_devops_build_pipeline_stage:
# required
build_pipeline_stage_type: TRIGGER_DEPLOYMENT_PIPELINE
# optional
display_name: display_name_example
description: description_example
build_pipeline_stage_predecessor_collection:
# required
items:
- # required
id: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
freeform_tags: {'Department': 'Finance'}
defined_tags: {'Operations': {'CostCenter': 'US'}}
deploy_pipeline_id: "ocid1.deploypipeline.oc1..xxxxxxEXAMPLExxxxxx"
is_pass_all_parameters_enabled: true
- name: Update build_pipeline_stage with build_pipeline_stage_type = WAIT
oci_devops_build_pipeline_stage:
# required
build_pipeline_stage_type: WAIT
# optional
display_name: display_name_example
description: description_example
build_pipeline_stage_predecessor_collection:
# required
items:
- # required
id: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
freeform_tags: {'Department': 'Finance'}
defined_tags: {'Operations': {'CostCenter': 'US'}}
wait_criteria:
# required
wait_type: ABSOLUTE_WAIT
wait_duration: wait_duration_example
- name: Update build_pipeline_stage with build_pipeline_stage_type = BUILD
oci_devops_build_pipeline_stage:
# required
build_pipeline_stage_type: BUILD
# optional
display_name: display_name_example
description: description_example
build_pipeline_stage_predecessor_collection:
# required
items:
- # required
id: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
freeform_tags: {'Department': 'Finance'}
defined_tags: {'Operations': {'CostCenter': 'US'}}
image: image_example
build_spec_file: build_spec_file_example
stage_execution_timeout_in_seconds: 56
build_source_collection:
# required
items:
- # required
name: name_example
connection_type: GITHUB
repository_url: repository_url_example
branch: branch_example
connection_id: "ocid1.connection.oc1..xxxxxxEXAMPLExxxxxx"
primary_build_source: primary_build_source_example
- name: Update build_pipeline_stage using name (when environment variable OCI_USE_NAME_AS_IDENTIFIER is set) with build_pipeline_stage_type = DELIVER_ARTIFACT
oci_devops_build_pipeline_stage:
# required
build_pipeline_stage_type: DELIVER_ARTIFACT
# optional
display_name: display_name_example
description: description_example
build_pipeline_stage_predecessor_collection:
# required
items:
- # required
id: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
freeform_tags: {'Department': 'Finance'}
defined_tags: {'Operations': {'CostCenter': 'US'}}
deliver_artifact_collection:
# required
items:
- # required
artifact_name: artifact_name_example
artifact_id: "ocid1.artifact.oc1..xxxxxxEXAMPLExxxxxx"
- name: >
Update build_pipeline_stage using name (when environment variable OCI_USE_NAME_AS_IDENTIFIER is set)
with build_pipeline_stage_type = TRIGGER_DEPLOYMENT_PIPELINE
oci_devops_build_pipeline_stage:
# required
build_pipeline_stage_type: TRIGGER_DEPLOYMENT_PIPELINE
# optional
display_name: display_name_example
description: description_example
build_pipeline_stage_predecessor_collection:
# required
items:
- # required
id: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
freeform_tags: {'Department': 'Finance'}
defined_tags: {'Operations': {'CostCenter': 'US'}}
deploy_pipeline_id: "ocid1.deploypipeline.oc1..xxxxxxEXAMPLExxxxxx"
is_pass_all_parameters_enabled: true
- name: Update build_pipeline_stage using name (when environment variable OCI_USE_NAME_AS_IDENTIFIER is set) with build_pipeline_stage_type = WAIT
oci_devops_build_pipeline_stage:
# required
build_pipeline_stage_type: WAIT
# optional
display_name: display_name_example
description: description_example
build_pipeline_stage_predecessor_collection:
# required
items:
- # required
id: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
freeform_tags: {'Department': 'Finance'}
defined_tags: {'Operations': {'CostCenter': 'US'}}
wait_criteria:
# required
wait_type: ABSOLUTE_WAIT
wait_duration: wait_duration_example
- name: Update build_pipeline_stage using name (when environment variable OCI_USE_NAME_AS_IDENTIFIER is set) with build_pipeline_stage_type = BUILD
oci_devops_build_pipeline_stage:
# required
build_pipeline_stage_type: BUILD
# optional
display_name: display_name_example
description: description_example
build_pipeline_stage_predecessor_collection:
# required
items:
- # required
id: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
freeform_tags: {'Department': 'Finance'}
defined_tags: {'Operations': {'CostCenter': 'US'}}
image: image_example
build_spec_file: build_spec_file_example
stage_execution_timeout_in_seconds: 56
build_source_collection:
# required
items:
- # required
name: name_example
connection_type: GITHUB
repository_url: repository_url_example
branch: branch_example
connection_id: "ocid1.connection.oc1..xxxxxxEXAMPLExxxxxx"
primary_build_source: primary_build_source_example
- name: Delete build_pipeline_stage
oci_devops_build_pipeline_stage:
# required
build_pipeline_stage_id: "ocid1.buildpipelinestage.oc1..xxxxxxEXAMPLExxxxxx"
state: absent
- name: Delete build_pipeline_stage using name (when environment variable OCI_USE_NAME_AS_IDENTIFIER is set)
oci_devops_build_pipeline_stage:
# required
display_name: display_name_example
state: absent
"""
RETURN = """
build_pipeline_stage:
description:
- Details of the BuildPipelineStage resource acted upon by the current operation
returned: on success
type: complex
contains:
id:
description:
- Unique identifier that is immutable on creation.
returned: on success
type: str
sample: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
display_name:
description:
- Stage display name, which can be renamed and is not necessarily unique. Avoid entering confidential information.
returned: on success
type: str
sample: display_name_example
description:
description:
- Optional description about the build stage.
returned: on success
type: str
sample: description_example
project_id:
description:
- The OCID of the DevOps project.
returned: on success
type: str
sample: "ocid1.project.oc1..xxxxxxEXAMPLExxxxxx"
build_pipeline_id:
description:
- The OCID of the build pipeline.
returned: on success
type: str
sample: "ocid1.buildpipeline.oc1..xxxxxxEXAMPLExxxxxx"
compartment_id:
description:
- The OCID of the compartment where the pipeline is created.
returned: on success
type: str
sample: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
build_pipeline_stage_type:
description:
- "Defines the stage type, which is one of the following: Build, Deliver Artifacts, Wait, and Trigger Deployment."
returned: on success
type: str
sample: WAIT
time_created:
description:
- The time the stage was created. Format defined by L(RFC3339,https://datatracker.ietf.org/doc/html/rfc3339).
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
time_updated:
description:
- The time the stage was updated. Format defined by L(RFC3339,https://datatracker.ietf.org/doc/html/rfc3339).
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
lifecycle_state:
description:
- The current state of the stage.
returned: on success
type: str
sample: CREATING
lifecycle_details:
description:
- A message describing the current state in more detail. For example, can be used to provide actionable information for a resource in Failed
state.
returned: on success
type: str
sample: lifecycle_details_example
build_pipeline_stage_predecessor_collection:
description:
- ""
returned: on success
type: complex
contains:
items:
description:
- A list of build pipeline stage predecessors for a stage.
returned: on success
type: complex
contains:
id:
description:
- The ID of the predecessor stage. If a stage is the first stage in the pipeline, then the ID is the pipeline's ID.
returned: on success
type: str
sample: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
freeform_tags:
description:
- "Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. See L(Resource
Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm). Example: `{\\"bar-key\\": \\"value\\"}`"
returned: on success
type: dict
sample: {'Department': 'Finance'}
defined_tags:
description:
- "Defined tags for this resource. Each key is predefined and scoped to a namespace. See L(Resource
Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm). Example: `{\\"foo-namespace\\": {\\"bar-key\\": \\"value\\"}}`"
returned: on success
type: dict
sample: {'Operations': {'CostCenter': 'US'}}
system_tags:
description:
- "Usage of system tag keys. These predefined keys are scoped to namespaces. See L(Resource
Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm). Example: `{\\"orcl-cloud\\": {\\"free-tier-retained\\":
\\"true\\"}}`"
returned: on success
type: dict
sample: {}
image:
description:
- Image name for the build environment.
returned: on success
type: str
sample: OL7_X86_64_STANDARD_10
build_spec_file:
description:
- The path to the build specification file for this environment. The default location of the file if not specified is build_spec.yaml.
returned: on success
type: str
sample: build_spec_file_example
stage_execution_timeout_in_seconds:
description:
- Timeout for the build stage execution. Specify value in seconds.
returned: on success
type: int
sample: 56
build_source_collection:
description:
- ""
returned: on success
type: complex
contains:
items:
description:
- Collection of build sources. In case of UPDATE operation, replaces existing build sources list. Merging with existing build sources is
not supported.
returned: on success
type: complex
contains:
name:
description:
- Name of the build source. This must be unique within a build source collection. The name can be used by customers to locate
the working directory pertinent to this repository.
returned: on success
type: str
sample: name_example
connection_type:
description:
- The type of source provider.
returned: on success
type: str
sample: GITHUB
repository_url:
description:
- URL for the repository.
returned: on success
type: str
sample: repository_url_example
branch:
description:
- Branch name.
returned: on success
type: str
sample: branch_example
repository_id:
description:
- The DevOps code repository ID.
returned: on success
type: str
sample: "ocid1.repository.oc1..xxxxxxEXAMPLExxxxxx"
connection_id:
description:
- Connection identifier pertinent to GitHub source provider.
returned: on success
type: str
sample: "ocid1.connection.oc1..xxxxxxEXAMPLExxxxxx"
primary_build_source:
description:
- Name of the build source where the build_spec.yml file is located. If not specified, then the first entry in the build source collection is
chosen as primary build source.
returned: on success
type: str
sample: primary_build_source_example
deliver_artifact_collection:
description:
- ""
returned: on success
type: complex
contains:
items:
description:
- Collection of artifacts that were generated in the Build stage and need to be pushed to the artifactory stores. In case of UPDATE
operation, replaces existing artifacts list. Merging with existing artifacts is not supported.
returned: on success
type: complex
contains:
artifact_name:
description:
- Name of the artifact specified in the build_spec.yaml file.
returned: on success
type: str
sample: artifact_name_example
artifact_id:
description:
- Artifact identifier that contains the artifact definition.
returned: on success
type: str
sample: "ocid1.artifact.oc1..xxxxxxEXAMPLExxxxxx"
deploy_pipeline_id:
description:
- A target deployment pipeline OCID that will run in this stage.
returned: on success
type: str
sample: "ocid1.deploypipeline.oc1..xxxxxxEXAMPLExxxxxx"
is_pass_all_parameters_enabled:
description:
- A boolean flag that specifies whether all the parameters must be passed when the deployment is triggered.
returned: on success
type: bool
sample: true
wait_criteria:
description:
- ""
returned: on success
type: complex
contains:
wait_type:
description:
- Wait criteria type.
returned: on success
type: str
sample: ABSOLUTE_WAIT
wait_duration:
description:
- The absolute wait duration. An ISO 8601 formatted duration string. Minimum waitDuration should be 5 seconds. Maximum waitDuration can
be up to 2 days.
returned: on success
type: str
sample: wait_duration_example
sample: {
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"display_name": "display_name_example",
"description": "description_example",
"project_id": "ocid1.project.oc1..xxxxxxEXAMPLExxxxxx",
"build_pipeline_id": "ocid1.buildpipeline.oc1..xxxxxxEXAMPLExxxxxx",
"compartment_id": "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx",
"build_pipeline_stage_type": "WAIT",
"time_created": "2013-10-20T19:20:30+01:00",
"time_updated": "2013-10-20T19:20:30+01:00",
"lifecycle_state": "CREATING",
"lifecycle_details": "lifecycle_details_example",
"build_pipeline_stage_predecessor_collection": {
"items": [{
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
}]
},
"freeform_tags": {'Department': 'Finance'},
"defined_tags": {'Operations': {'CostCenter': 'US'}},
"system_tags": {},
"image": "OL7_X86_64_STANDARD_10",
"build_spec_file": "build_spec_file_example",
"stage_execution_timeout_in_seconds": 56,
"build_source_collection": {
"items": [{
"name": "name_example",
"connection_type": "GITHUB",
"repository_url": "repository_url_example",
"branch": "branch_example",
"repository_id": "ocid1.repository.oc1..xxxxxxEXAMPLExxxxxx",
"connection_id": "ocid1.connection.oc1..xxxxxxEXAMPLExxxxxx"
}]
},
"primary_build_source": "primary_build_source_example",
"deliver_artifact_collection": {
"items": [{
"artifact_name": "artifact_name_example",
"artifact_id": "ocid1.artifact.oc1..xxxxxxEXAMPLExxxxxx"
}]
},
"deploy_pipeline_id": "ocid1.deploypipeline.oc1..xxxxxxEXAMPLExxxxxx",
"is_pass_all_parameters_enabled": true,
"wait_criteria": {
"wait_type": "ABSOLUTE_WAIT",
"wait_duration": "wait_duration_example"
}
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import (
oci_common_utils,
oci_wait_utils,
)
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceHelperBase,
get_custom_class,
)
# Import the OCI Python SDK lazily and record availability; main() fails the
# module with a clear message instead of crashing when the SDK is missing.
try:
    from oci.devops import DevopsClient
    from oci.devops.models import CreateBuildPipelineStageDetails
    from oci.devops.models import UpdateBuildPipelineStageDetails

    HAS_OCI_PY_SDK = True
except ImportError:
    HAS_OCI_PY_SDK = False
class BuildPipelineStageHelperGen(OCIResourceHelperBase):
    """Generated helper for BuildPipelineStage resources.

    Supported operations: create, update, get, list and delete.
    """

    def get_module_resource_id_param(self):
        # Name of the module option that carries this resource's OCID.
        return "build_pipeline_stage_id"

    def get_module_resource_id(self):
        return self.module.params.get("build_pipeline_stage_id")

    def get_get_fn(self):
        return self.client.get_build_pipeline_stage

    def get_resource(self):
        stage_id = self.module.params.get("build_pipeline_stage_id")
        return oci_common_utils.call_with_backoff(
            self.client.get_build_pipeline_stage,
            build_pipeline_stage_id=stage_id,
        )

    def get_required_kwargs_for_list(self):
        # The list operation has no mandatory arguments.
        return {}

    def get_optional_kwargs_for_list(self):
        params = self.module.params
        selected = {}
        for name in ("build_pipeline_id", "display_name"):
            if params.get(name) is None:
                continue
            key_by = params.get("key_by")
            # Pass the option through when matching by name, or when the
            # user's key_by setting (if any) includes this option.
            if self._use_name_as_identifier() or not key_by or name in key_by:
                selected[name] = params[name]
        return selected

    def list_resources(self):
        kwargs = oci_common_utils.merge_dicts(
            self.get_required_kwargs_for_list(),
            self.get_optional_kwargs_for_list(),
        )
        return oci_common_utils.list_all_resources(
            self.client.list_build_pipeline_stages, **kwargs
        )

    def get_create_model_class(self):
        return CreateBuildPipelineStageDetails

    def create_resource(self):
        # Fire the create call, then wait on its work request to settle.
        return oci_wait_utils.call_and_wait(
            call_fn=self.client.create_build_pipeline_stage,
            call_fn_args=(),
            call_fn_kwargs=dict(
                create_build_pipeline_stage_details=self.get_create_model(),
            ),
            waiter_type=oci_wait_utils.WORK_REQUEST_WAITER_KEY,
            operation=oci_common_utils.CREATE_OPERATION_KEY,
            waiter_client=self.get_waiter_client(),
            resource_helper=self,
            wait_for_states=oci_common_utils.get_work_request_completed_states(),
        )

    def get_update_model_class(self):
        return UpdateBuildPipelineStageDetails

    def update_resource(self):
        # Fire the update call, then wait on its work request to settle.
        return oci_wait_utils.call_and_wait(
            call_fn=self.client.update_build_pipeline_stage,
            call_fn_args=(),
            call_fn_kwargs=dict(
                build_pipeline_stage_id=self.module.params.get(
                    "build_pipeline_stage_id"
                ),
                update_build_pipeline_stage_details=self.get_update_model(),
            ),
            waiter_type=oci_wait_utils.WORK_REQUEST_WAITER_KEY,
            operation=oci_common_utils.UPDATE_OPERATION_KEY,
            waiter_client=self.get_waiter_client(),
            resource_helper=self,
            wait_for_states=oci_common_utils.get_work_request_completed_states(),
        )

    def delete_resource(self):
        # Fire the delete call, then wait on its work request to settle.
        return oci_wait_utils.call_and_wait(
            call_fn=self.client.delete_build_pipeline_stage,
            call_fn_args=(),
            call_fn_kwargs=dict(
                build_pipeline_stage_id=self.module.params.get(
                    "build_pipeline_stage_id"
                ),
            ),
            waiter_type=oci_wait_utils.WORK_REQUEST_WAITER_KEY,
            operation=oci_common_utils.DELETE_OPERATION_KEY,
            waiter_client=self.get_waiter_client(),
            resource_helper=self,
            wait_for_states=oci_common_utils.get_work_request_completed_states(),
        )
# Look up an optional hand-written customization class by name; when none is
# registered, get_custom_class returns a no-op base so the MRO below still works.
BuildPipelineStageHelperCustom = get_custom_class("BuildPipelineStageHelperCustom")
class ResourceHelper(BuildPipelineStageHelperCustom, BuildPipelineStageHelperGen):
    """Concrete helper: customizations (if any) layered over the generated base."""
def main():
    """Module entry point.

    Builds the argument spec (common OCI options plus the resource-specific
    ones below), validates input via AnsibleModule, and dispatches to the
    requested create/update/delete operation on the resource helper.
    """
    # Start from the shared OCI spec (auth options, wait options, etc.).
    module_args = oci_common_utils.get_common_arg_spec(
        supports_create=True, supports_wait=True
    )
    # Resource-specific options; nested "options"/"elements" mirror the
    # suboption structure documented in DOCUMENTATION above.
    module_args.update(
        dict(
            display_name=dict(aliases=["name"], type="str"),
            description=dict(type="str"),
            build_pipeline_stage_type=dict(
                type="str",
                choices=[
                    "DELIVER_ARTIFACT",
                    "TRIGGER_DEPLOYMENT_PIPELINE",
                    "WAIT",
                    "BUILD",
                ],
            ),
            build_pipeline_id=dict(type="str"),
            build_pipeline_stage_predecessor_collection=dict(
                type="dict",
                options=dict(
                    items=dict(
                        type="list",
                        elements="dict",
                        required=True,
                        options=dict(id=dict(type="str", required=True)),
                    )
                ),
            ),
            freeform_tags=dict(type="dict"),
            defined_tags=dict(type="dict"),
            deliver_artifact_collection=dict(
                type="dict",
                options=dict(
                    items=dict(
                        type="list",
                        elements="dict",
                        required=True,
                        options=dict(
                            artifact_name=dict(type="str", required=True),
                            artifact_id=dict(type="str", required=True),
                        ),
                    )
                ),
            ),
            deploy_pipeline_id=dict(type="str"),
            # no_log=True was emitted by the generator (likely because the
            # name matches a "parameters" heuristic); kept as generated.
            is_pass_all_parameters_enabled=dict(type="bool", no_log=True),
            wait_criteria=dict(
                type="dict",
                options=dict(
                    wait_type=dict(
                        type="str", required=True, choices=["ABSOLUTE_WAIT"]
                    ),
                    wait_duration=dict(type="str"),
                ),
            ),
            image=dict(type="str"),
            build_spec_file=dict(type="str"),
            stage_execution_timeout_in_seconds=dict(type="int"),
            build_source_collection=dict(
                type="dict",
                options=dict(
                    items=dict(
                        type="list",
                        elements="dict",
                        required=True,
                        options=dict(
                            name=dict(type="str", required=True),
                            connection_type=dict(
                                type="str",
                                required=True,
                                choices=["GITHUB", "DEVOPS_CODE_REPOSITORY", "GITLAB"],
                            ),
                            repository_url=dict(type="str", required=True),
                            branch=dict(type="str", required=True),
                            connection_id=dict(type="str"),
                            repository_id=dict(type="str"),
                        ),
                    )
                ),
            ),
            primary_build_source=dict(type="str"),
            build_pipeline_stage_id=dict(aliases=["id"], type="str"),
            state=dict(type="str", default="present", choices=["present", "absent"]),
        )
    )
    module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)
    # Fail early with a clear message if the OCI SDK could not be imported.
    if not HAS_OCI_PY_SDK:
        module.fail_json(msg="oci python sdk required for this module.")
    resource_helper = ResourceHelper(
        module=module,
        resource_type="build_pipeline_stage",
        service_client_class=DevopsClient,
        namespace="devops",
    )
    result = dict(changed=False)
    # Name-based variants are checked first so that the
    # OCI_USE_NAME_AS_IDENTIFIER lookup path takes precedence.
    if resource_helper.is_delete_using_name():
        result = resource_helper.delete_using_name()
    elif resource_helper.is_delete():
        result = resource_helper.delete()
    elif resource_helper.is_update_using_name():
        result = resource_helper.update_using_name()
    elif resource_helper.is_update():
        result = resource_helper.update()
    elif resource_helper.is_create():
        result = resource_helper.create()
    module.exit_json(**result)
if __name__ == "__main__":
main()
| 40.209991
| 160
| 0.595394
| 4,322
| 43,467
| 5.727441
| 0.091162
| 0.068272
| 0.085804
| 0.044437
| 0.818817
| 0.784722
| 0.727357
| 0.669912
| 0.63416
| 0.615577
| 0
| 0.008014
| 0.336853
| 43,467
| 1,080
| 161
| 40.247222
| 0.850755
| 0.010146
| 0
| 0.680915
| 0
| 0.030815
| 0.803273
| 0.164481
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012922
| false
| 0.00994
| 0.007952
| 0.007952
| 0.034791
| 0.000994
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
904ec7378f0c9d39114b18d503b502443dbf7626
| 14
|
py
|
Python
|
HDF_process_2020.05/__init__.py
|
944568752/Ocean_Color_Data_Process
|
64ccf79db0f0154c4dea4a667cad826797957791
|
[
"MIT"
] | null | null | null |
HDF_process_2020.05/__init__.py
|
944568752/Ocean_Color_Data_Process
|
64ccf79db0f0154c4dea4a667cad826797957791
|
[
"MIT"
] | null | null | null |
HDF_process_2020.05/__init__.py
|
944568752/Ocean_Color_Data_Process
|
64ccf79db0f0154c4dea4a667cad826797957791
|
[
"MIT"
] | null | null | null |
Initialize
| 2.8
| 10
| 0.714286
| 1
| 14
| 10
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.285714
| 14
| 4
| 11
| 3.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
905ec926c78ddfb13f1fdfa0cfb63d4031745de4
| 960
|
py
|
Python
|
cpc/abst/Slot.py
|
U-Ar/Cpresto
|
f723458fb237c9e3e8bc8a6afdf7c81858a65363
|
[
"BSD-3-Clause"
] | 1
|
2021-05-09T07:10:19.000Z
|
2021-05-09T07:10:19.000Z
|
cpc/abst/Slot.py
|
U-Ar/Cpresto
|
f723458fb237c9e3e8bc8a6afdf7c81858a65363
|
[
"BSD-3-Clause"
] | null | null | null |
cpc/abst/Slot.py
|
U-Ar/Cpresto
|
f723458fb237c9e3e8bc8a6afdf7c81858a65363
|
[
"BSD-3-Clause"
] | null | null | null |
from .Node import Node
from type.Type import Type
class Slot(Node):
def __init__(self,t,n):
self._type_node = t
self._name = n
self._offset = Type.size_unknown
def type_node(self):
return self._type_node
def type_ref(self):
return self._type_node.type_ref()
def type(self):
return self._type_node.type()
def name(self):
return self._name
def size(self):
return self.type().size()
def alloc_size(self):
return self.type().alloc_size()
def alignment(self):
return self.type().alignment()
def offset(self):
return self._offset
def set_offset(self,offset):
self._offset = offset
def location(self):
return self._type_node.location()
def _dump(self,dumper):
dumper.print_member("name",self._name)
dumper.print_member("type_node",self._type_node)
| 22.857143
| 56
| 0.597917
| 124
| 960
| 4.362903
| 0.201613
| 0.133087
| 0.232902
| 0.232902
| 0.25878
| 0.096118
| 0
| 0
| 0
| 0
| 0
| 0
| 0.296875
| 960
| 42
| 56
| 22.857143
| 0.801481
| 0
| 0
| 0
| 0
| 0
| 0.013528
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.066667
| 0.3
| 0.8
| 0.066667
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
90679b536a18d16605b48cb83026e555f4e2bac1
| 12
|
py
|
Python
|
campy/writer/__init__.py
|
SamMinkowicz/campy
|
b554b04cbf90fd576536ba4c137393721012ea50
|
[
"MIT"
] | 14
|
2020-12-16T16:52:58.000Z
|
2022-02-21T12:20:07.000Z
|
campy/writer/__init__.py
|
SamMinkowicz/campy
|
b554b04cbf90fd576536ba4c137393721012ea50
|
[
"MIT"
] | 7
|
2021-05-26T17:29:16.000Z
|
2022-02-19T17:37:58.000Z
|
campy/writer/__init__.py
|
SamMinkowicz/campy
|
b554b04cbf90fd576536ba4c137393721012ea50
|
[
"MIT"
] | 8
|
2020-12-22T20:06:51.000Z
|
2022-02-12T21:03:23.000Z
|
# __init__
| 4
| 10
| 0.666667
| 1
| 12
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 12
| 2
| 11
| 6
| 0.444444
| 0.666667
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
9083effe18a83a9ffd4da49b021bac2f7169b68b
| 87
|
py
|
Python
|
setup.py
|
Nibuja05/KVConverter
|
74f810df4ac82358f405eac9c2f56dce13b69302
|
[
"MIT"
] | 2
|
2020-07-06T00:24:27.000Z
|
2021-09-20T20:16:36.000Z
|
setup.py
|
Nibuja05/KVConverter
|
74f810df4ac82358f405eac9c2f56dce13b69302
|
[
"MIT"
] | null | null | null |
setup.py
|
Nibuja05/KVConverter
|
74f810df4ac82358f405eac9c2f56dce13b69302
|
[
"MIT"
] | null | null | null |
from distutils.core import setup
import py2exe
setup(console=['KV_Converter_Main.py'])
| 21.75
| 39
| 0.816092
| 13
| 87
| 5.307692
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0125
| 0.08046
| 87
| 4
| 39
| 21.75
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0.227273
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9094433e9ba2a7367b279a0751fba39f7029f40b
| 28
|
py
|
Python
|
schedulark/queue/memory/__init__.py
|
knowark/schedulark
|
0b5c32d1eb67cd1e38a3f2166fe87532e0a3f8ea
|
[
"MIT"
] | 1
|
2021-11-04T16:15:56.000Z
|
2021-11-04T16:15:56.000Z
|
schedulark/queue/memory/__init__.py
|
knowark/schedulark
|
0b5c32d1eb67cd1e38a3f2166fe87532e0a3f8ea
|
[
"MIT"
] | null | null | null |
schedulark/queue/memory/__init__.py
|
knowark/schedulark
|
0b5c32d1eb67cd1e38a3f2166fe87532e0a3f8ea
|
[
"MIT"
] | null | null | null |
from .memory_queue import *
| 14
| 27
| 0.785714
| 4
| 28
| 5.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 28
| 1
| 28
| 28
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
90ac32a773954e453bf803017f774e13f72c3d0c
| 169
|
py
|
Python
|
explanator/local_exploration/__init__.py
|
k1414st/explanator
|
8a0a2a74fff93f83e3ae7834da7f6e94928af155
|
[
"MIT"
] | null | null | null |
explanator/local_exploration/__init__.py
|
k1414st/explanator
|
8a0a2a74fff93f83e3ae7834da7f6e94928af155
|
[
"MIT"
] | null | null | null |
explanator/local_exploration/__init__.py
|
k1414st/explanator
|
8a0a2a74fff93f83e3ae7834da7f6e94928af155
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
from __future__ import print_function
from .local_gradient import local_gradient, get_local_linear_model
__version__ = '0.0.1'
| 21.125
| 66
| 0.846154
| 24
| 169
| 5.166667
| 0.583333
| 0.16129
| 0.258065
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02
| 0.112426
| 169
| 7
| 67
| 24.142857
| 0.806667
| 0
| 0
| 0
| 0
| 0
| 0.029762
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0.25
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.