hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
58790be682d4e6610bdf4c990ad83437e667ab45
| 121
|
py
|
Python
|
pipeline_dsl/__init__.py
|
isabella232/pipeline-dsl
|
543dc611821e75b9ee96a0277038de6350bec012
|
[
"Apache-2.0"
] | 15
|
2021-01-28T08:33:14.000Z
|
2022-01-05T20:24:26.000Z
|
pipeline_dsl/__init__.py
|
cherwin/pipeline-dsl
|
9bfa32c46f09fab15b35d46d4e7fccdadaef8d01
|
[
"Apache-2.0"
] | 14
|
2021-03-23T16:10:18.000Z
|
2021-08-24T09:03:07.000Z
|
pipeline_dsl/__init__.py
|
isabella232/pipeline-dsl
|
543dc611821e75b9ee96a0277038de6350bec012
|
[
"Apache-2.0"
] | 3
|
2021-03-24T08:46:21.000Z
|
2022-03-04T00:24:00.000Z
|
from pipeline_dsl.shell import shell, Password
from pipeline_dsl.concourse import *
from pipeline_dsl.resources import *
| 30.25
| 46
| 0.842975
| 17
| 121
| 5.823529
| 0.470588
| 0.363636
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107438
| 121
| 3
| 47
| 40.333333
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
589599a7790f9156d6de6a809b50d17b4deaba10
| 77,188
|
py
|
Python
|
functions/xml_functions.py
|
mtasa-typescript/mtasa-wiki-dump
|
edea1746850fb6c99d6155d1d7891e2cceb33a5c
|
[
"MIT"
] | null | null | null |
functions/xml_functions.py
|
mtasa-typescript/mtasa-wiki-dump
|
edea1746850fb6c99d6155d1d7891e2cceb33a5c
|
[
"MIT"
] | 1
|
2021-02-24T21:50:18.000Z
|
2021-02-24T21:50:18.000Z
|
functions/xml_functions.py
|
mtasa-typescript/mtasa-wiki-dump
|
edea1746850fb6c99d6155d1d7891e2cceb33a5c
|
[
"MIT"
] | null | null | null |
# Autogenerated file. ANY CHANGES WILL BE OVERWRITTEN
from to_python.core.types import FunctionType, \
FunctionArgument, \
FunctionArgumentValues, \
FunctionReturnTypes, \
FunctionSignature, \
FunctionDoc, \
FunctionData, \
CompoundFunctionData
DUMP_PARTIAL = [
CompoundFunctionData(
server=[
FunctionData(
signature=FunctionSignature(
name='xmlCopyFile',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['xmlnode'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='nodeToCopy',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='newFilePath',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function copies all contents of a certain node in a XML document to a new document file, so the copied node becomes the new files root node.\nThe new file will not be saved to file system until xmlSaveFile() is called' ,
arguments={
"nodeToCopy": """the xmlnode that is to be copied to a new document. """,
"newFilePath": """the path of the file that is to be created, in the following format: :resourceName/path. resourceName is the name of the resource the file is in, and path is the path from the root directory of the resource to the file.
:For example, to create a file named 'newfile.xml' with myNode as the root node in the resource 'ctf', it can be done from another resource this way: ''xmlCopyFile(myNode, ":ctf/newfile.xml")''.
:If the file is to be in the current resource, only the file path is necessary, e.g. ''xmlCopyFile(myNode, "newfile.xml")''. """
},
result='returns the xmlnode of the copy if the node was successfully copied, false if invalid arguments were passed.' ,
),
url='xmlCopyFile',
)
],
client=[
FunctionData(
signature=FunctionSignature(
name='xmlCopyFile',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['xmlnode'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='nodeToCopy',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='newFilePath',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function copies all contents of a certain node in a XML document to a new document file, so the copied node becomes the new files root node.\nThe new file will not be saved to file system until xmlSaveFile() is called' ,
arguments={
"nodeToCopy": """the xmlnode that is to be copied to a new document. """,
"newFilePath": """the path of the file that is to be created, in the following format: :resourceName/path. resourceName is the name of the resource the file is in, and path is the path from the root directory of the resource to the file.
:For example, to create a file named 'newfile.xml' with myNode as the root node in the resource 'ctf', it can be done from another resource this way: ''xmlCopyFile(myNode, ":ctf/newfile.xml")''.
:If the file is to be in the current resource, only the file path is necessary, e.g. ''xmlCopyFile(myNode, "newfile.xml")''. """
},
result='returns the xmlnode of the copy if the node was successfully copied, false if invalid arguments were passed.' ,
),
url='xmlCopyFile',
)
],
),
CompoundFunctionData(
server=[
FunctionData(
signature=FunctionSignature(
name='xmlCreateChild',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['xmlnode'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='parentNode',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='tagName',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function creates a new child node under an XML node.' ,
arguments={
"parentNode": """the xmlnode you want to create a new child node under. """,
"tagName": """the type of the child node that will be created. """
},
result='returns the created xmlnode if successful, false otherwise.' ,
),
url='xmlCreateChild',
)
],
client=[
FunctionData(
signature=FunctionSignature(
name='xmlCreateChild',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['xmlnode'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='parentNode',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='tagName',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function creates a new child node under an XML node.' ,
arguments={
"parentNode": """the xmlnode you want to create a new child node under. """,
"tagName": """the type of the child node that will be created. """
},
result='returns the created xmlnode if successful, false otherwise.' ,
),
url='xmlCreateChild',
)
],
),
CompoundFunctionData(
server=[
FunctionData(
signature=FunctionSignature(
name='xmlCreateFile',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['xmlnode'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='filePath',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='rootNodeName',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function creates a new XML document, which can later be saved to a file by using xmlSaveFile. This function will overwrite the file specified if it already exists.' ,
arguments={
"filePath": """The filepath of the file in the following format: :resourceName/path. resourceName is the name of the resource the file will be in, and path is the path from the root directory of the resource to the file.
:For example, if you want to create a file named 'new.xml' in the resource 'ctf', it can be created from another resource this way: ''xmlCreateFile(":ctf/new.xml", "newroot")''.
:If the file is in the current resource, only the file path is necessary, e.g. ''xmlCreateFile("new.xml", "newroot")''.
:Note that if a different resource than default is being accessed, the caller resource needs access to general.ModifyOtherObjects in the [[ACL]]. """,
"rootNodeName": """the name of the root node in the XML document. """
},
result='returns the root xmlnode object of the new xml file if successful, or false otherwise.' ,
),
url='xmlCreateFile',
)
],
client=[
FunctionData(
signature=FunctionSignature(
name='xmlCreateFile',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['xmlnode'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='filePath',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='rootNodeName',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function creates a new XML document, which can later be saved to a file by using xmlSaveFile. This function will overwrite the file specified if it already exists.' ,
arguments={
"filePath": """The filepath of the file in the following format: :resourceName/path. resourceName is the name of the resource the file will be in, and path is the path from the root directory of the resource to the file.
:For example, if you want to create a file named 'new.xml' in the resource 'ctf', it can be created from another resource this way: ''xmlCreateFile(":ctf/new.xml", "newroot")''.
:If the file is in the current resource, only the file path is necessary, e.g. ''xmlCreateFile("new.xml", "newroot")''.
:Note that if a different resource than default is being accessed, the caller resource needs access to general.ModifyOtherObjects in the [[ACL]]. """,
"rootNodeName": """the name of the root node in the XML document. """
},
result='returns the root xmlnode object of the new xml file if successful, or false otherwise.' ,
),
url='xmlCreateFile',
)
],
),
CompoundFunctionData(
server=[
FunctionData(
signature=FunctionSignature(
name='xmlDestroyNode',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['bool'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='theXMLNode',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function destroys a XML node from the XML node tree.' ,
arguments={
"theXMLNode": """The xml node you want to destroy. """
},
result='returns true if the xml node was successfully destroyed, false otherwise.' ,
),
url='xmlDestroyNode',
)
],
client=[
FunctionData(
signature=FunctionSignature(
name='xmlDestroyNode',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['bool'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='theXMLNode',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function destroys a XML node from the XML node tree.' ,
arguments={
"theXMLNode": """The xml node you want to destroy. """
},
result='returns true if the xml node was successfully destroyed, false otherwise.' ,
),
url='xmlDestroyNode',
)
],
),
CompoundFunctionData(
server=[
FunctionData(
signature=FunctionSignature(
name='xmlFindChild',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['xmlnode'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='parent',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='tagName',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='index',
argument_type=FunctionType(
names=['int'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function returns a named child node of an XML node.' ,
arguments={
"parent": """: This is an xmlnode that you want to find the child node under. """,
"tagName": """: This is the name of the child node you wish to find (case-sensitive). """,
"index": """: This is the 0-based index of the node you wish to find. For example, to find the 5th subnode with a particular name, you would use 4 as the index value. To find the first occurence, use 0. """
},
result='returns an xmlnode if the node was found, false otherwise.' ,
),
url='xmlFindChild',
)
],
client=[
FunctionData(
signature=FunctionSignature(
name='xmlFindChild',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['xmlnode'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='parent',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='tagName',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='index',
argument_type=FunctionType(
names=['int'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function returns a named child node of an XML node.' ,
arguments={
"parent": """: This is an xmlnode that you want to find the child node under. """,
"tagName": """: This is the name of the child node you wish to find (case-sensitive). """,
"index": """: This is the 0-based index of the node you wish to find. For example, to find the 5th subnode with a particular name, you would use 4 as the index value. To find the first occurence, use 0. """
},
result='returns an xmlnode if the node was found, false otherwise.' ,
),
url='xmlFindChild',
)
],
),
CompoundFunctionData(
server=[
FunctionData(
signature=FunctionSignature(
name='xmlLoadFile',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['xmlnode'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='filePath',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='readOnly',
argument_type=FunctionType(
names=['bool'],
is_optional=True,
),
default_value='false',
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function provides an alternative way to load XML files to getResourceConfig.\nThis function loads an XML file and returns the node by specifying a specific file path, while getResourceConfig allows for loading an XML file from a resource.' ,
arguments={
"filePath": """The filepath of the file in the following format: :resourceName/path. resourceName is the name of the resource the file is in, and path is the path from the root directory of the resource to the file.
:For example, if there is a file named 'settings.xml' in the resource 'ctf', it can be accessed from another resource this way: ''xmlLoadFile(":ctf/settings.xml")''.
:If the file is in the current resource, only the file path is necessary, e.g. ''xmlLoadFile("settings.xml")''. """,
"readOnly": """By default, the XML file is opened with reading and writing access. You can specify true for this parameter if you only need reading access. """
},
result='returns the root xmlnode object of an xml file if successful, or false otherwise.\nprint error if something wrong with xml.\n|7485}}' ,
),
url='xmlLoadFile',
)
],
client=[
FunctionData(
signature=FunctionSignature(
name='xmlLoadFile',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['xmlnode'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='filePath',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='readOnly',
argument_type=FunctionType(
names=['bool'],
is_optional=True,
),
default_value='false',
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function provides an alternative way to load XML files to getResourceConfig.\nThis function loads an XML file and returns the node by specifying a specific file path, while getResourceConfig allows for loading an XML file from a resource.' ,
arguments={
"filePath": """The filepath of the file in the following format: :resourceName/path. resourceName is the name of the resource the file is in, and path is the path from the root directory of the resource to the file.
:For example, if there is a file named 'settings.xml' in the resource 'ctf', it can be accessed from another resource this way: ''xmlLoadFile(":ctf/settings.xml")''.
:If the file is in the current resource, only the file path is necessary, e.g. ''xmlLoadFile("settings.xml")''. """,
"readOnly": """By default, the XML file is opened with reading and writing access. You can specify true for this parameter if you only need reading access. """
},
result='returns the root xmlnode object of an xml file if successful, or false otherwise.\nprint error if something wrong with xml.\n|7485}}' ,
),
url='xmlLoadFile',
)
],
),
CompoundFunctionData(
server=[
FunctionData(
signature=FunctionSignature(
name='xmlLoadString',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['xmlnode'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='xmlString',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='' ,
arguments={
"xmlString": """A string containing XML data """
},
result='returns the root xmlnode object of an xml string if successful, or false otherwise (invalid xml string).' ,
),
url='xmlLoadString',
)
],
client=[
FunctionData(
signature=FunctionSignature(
name='xmlLoadString',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['xmlnode'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='xmlString',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='' ,
arguments={
"xmlString": """A string containing XML data """
},
result='returns the root xmlnode object of an xml string if successful, or false otherwise (invalid xml string).' ,
),
url='xmlLoadString',
)
],
),
CompoundFunctionData(
server=[
FunctionData(
signature=FunctionSignature(
name='xmlNodeGetAttribute',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['string'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='node',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='name',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function is used to return an attribute of a node in a configuration file.' ,
arguments={
"node": """The node from which you wish to return the attribute """,
"name": """The name of the attribute. """
},
result='returns the attribute in string form or false, if the attribute is not defined.' ,
),
url='xmlNodeGetAttribute',
)
],
client=[
FunctionData(
signature=FunctionSignature(
name='xmlNodeGetAttribute',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['string'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='node',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='name',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function is used to return an attribute of a node in a configuration file.' ,
arguments={
"node": """The node from which you wish to return the attribute """,
"name": """The name of the attribute. """
},
result='returns the attribute in string form or false, if the attribute is not defined.' ,
),
url='xmlNodeGetAttribute',
)
],
),
CompoundFunctionData(
server=[
FunctionData(
signature=FunctionSignature(
name='xmlNodeGetAttributes',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['table'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='node',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='Returns all the attributes of a specific XML node.' ,
arguments={
"node": """the XML node to get the attributes of. """
},
result='if successful, returns a table with as keys the names of the attributes and as values the corresponding attribute values. if the node has no attributes, returns an empty table. in case of failure, returns false.' ,
),
url='xmlNodeGetAttributes',
)
],
client=[
FunctionData(
signature=FunctionSignature(
name='xmlNodeGetAttributes',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['table'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='node',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='Returns all the attributes of a specific XML node.' ,
arguments={
"node": """the XML node to get the attributes of. """
},
result='if successful, returns a table with as keys the names of the attributes and as values the corresponding attribute values. if the node has no attributes, returns an empty table. in case of failure, returns false.' ,
),
url='xmlNodeGetAttributes',
)
],
),
CompoundFunctionData(
server=[
FunctionData(
signature=FunctionSignature(
name='xmlNodeGetChildren',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['table', 'xmlnode'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='parent',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='index',
argument_type=FunctionType(
names=['int'],
is_optional=True,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function returns all children of a particular XML node, or a particular child node.' ,
arguments={
"parent": """This is the xmlnode you want to retrieve one or all child nodes of. """,
"index": """If you only want to retrieve one particular child node, specify its (0-based) index here. For example if you only want the first node, specify 0; the fifth node has index 4, etc. """
},
result='if index isnt specified, returns a table containing all child nodes. if index is specified, returns the corresponding child node if it exists. if no nodes are found, it returns an empty table. returns false in case of failure.' ,
),
url='xmlNodeGetChildren',
)
],
client=[
FunctionData(
signature=FunctionSignature(
name='xmlNodeGetChildren',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['table', 'xmlnode'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='parent',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='index',
argument_type=FunctionType(
names=['int'],
is_optional=True,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function returns all children of a particular XML node, or a particular child node.' ,
arguments={
"parent": """This is the xmlnode you want to retrieve one or all child nodes of. """,
"index": """If you only want to retrieve one particular child node, specify its (0-based) index here. For example if you only want the first node, specify 0; the fifth node has index 4, etc. """
},
result='if index isnt specified, returns a table containing all child nodes. if index is specified, returns the corresponding child node if it exists. if no nodes are found, it returns an empty table. returns false in case of failure.' ,
),
url='xmlNodeGetChildren',
)
],
),
CompoundFunctionData(
server=[
FunctionData(
signature=FunctionSignature(
name='xmlNodeGetName',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['string'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='node',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='Gets the tag name of the specified XML node.' ,
arguments={
"node": """the node to get the tag name of. """
},
result='returns the tag name of the node if successful, false otherwise.' ,
),
url='xmlNodeGetName',
)
],
client=[
FunctionData(
signature=FunctionSignature(
name='xmlNodeGetName',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['string'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='node',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='Gets the tag name of the specified XML node.' ,
arguments={
"node": """the node to get the tag name of. """
},
result='returns the tag name of the node if successful, false otherwise.' ,
),
url='xmlNodeGetName',
)
],
),
CompoundFunctionData(
server=[
FunctionData(
signature=FunctionSignature(
name='xmlNodeGetParent',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['xmlnode'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='node',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='Returns the parent node of an xml node.' ,
arguments={
"node": """the node of which you want to know the parent. """
},
result='returns the parent node of the specified node if successful. returns false if the specified node is the root node or an invalid node was passed.' ,
),
url='xmlNodeGetParent',
)
],
client=[
FunctionData(
signature=FunctionSignature(
name='xmlNodeGetParent',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['xmlnode'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='node',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='Returns the parent node of an xml node.' ,
arguments={
"node": """the node of which you want to know the parent. """
},
result='returns the parent node of the specified node if successful. returns false if the specified node is the root node or an invalid node was passed.' ,
),
url='xmlNodeGetParent',
)
],
),
CompoundFunctionData(
server=[
FunctionData(
signature=FunctionSignature(
name='xmlNodeGetValue',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['string'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='theXMLNode',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function is made to be able to read tag values in XML files (eg. <something>anything</something>).' ,
arguments={
"theXMLNode": """The xml node of which you need to know the value. """
},
result='returns the value of the node as a string if it was received successfully, false otherwise.' ,
),
url='xmlNodeGetValue',
)
],
client=[
FunctionData(
signature=FunctionSignature(
name='xmlNodeGetValue',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['string'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='theXMLNode',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function is made to be able to read tag values in XML files (eg. <something>anything</something>).' ,
arguments={
"theXMLNode": """The xml node of which you need to know the value. """
},
result='returns the value of the node as a string if it was received successfully, false otherwise.' ,
),
url='xmlNodeGetValue',
)
],
),
CompoundFunctionData(
server=[
FunctionData(
signature=FunctionSignature(
name='xmlNodeSetAttribute',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['bool'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='node',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='name',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='value',
argument_type=FunctionType(
names=['string', 'float'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function is used to edit an attribute of a node in a configuration file.' ,
arguments={
"node": """The node of which you wish to edit an attribute. """,
"name": """The name of the attribute. """,
"value": """The value which you wish to change the attribute to. (Note: nil will delete the attribute) """
},
result='returns true if the attribute was set successfully, false if the node and/or attribute do not exist, or if theyre faulty.' ,
),
url='xmlNodeSetAttribute',
)
],
client=[
FunctionData(
signature=FunctionSignature(
name='xmlNodeSetAttribute',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['bool'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='node',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='name',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='value',
argument_type=FunctionType(
names=['string', 'float'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function is used to edit an attribute of a node in a configuration file.' ,
arguments={
"node": """The node of which you wish to edit an attribute. """,
"name": """The name of the attribute. """,
"value": """The value which you wish to change the attribute to. (Note: nil will delete the attribute) """
},
result='returns true if the attribute was set successfully, false if the node and/or attribute do not exist, or if theyre faulty.' ,
),
url='xmlNodeSetAttribute',
)
],
),
CompoundFunctionData(
server=[
FunctionData(
signature=FunctionSignature(
name='xmlNodeSetName',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['bool'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='node',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='name',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='Sets the tag name of the specified XML node.' ,
arguments={
"node": """the node to change the tag name of. """,
"name": """the new tag name to set. """
},
result='returns true if successful, false otherwise.' ,
),
url='xmlNodeSetName',
)
],
client=[
FunctionData(
signature=FunctionSignature(
name='xmlNodeSetName',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['bool'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='node',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='name',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='Sets the tag name of the specified XML node.' ,
arguments={
"node": """the node to change the tag name of. """,
"name": """the new tag name to set. """
},
result='returns true if successful, false otherwise.' ,
),
url='xmlNodeSetName',
)
],
),
CompoundFunctionData(
server=[
FunctionData(
signature=FunctionSignature(
name='xmlNodeSetValue',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['bool'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='theXMLNode',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='value',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='setCDATA',
argument_type=FunctionType(
names=['bool'],
is_optional=True,
),
default_value='false',
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function is made to be able to assign values to tags in XML files (eg. <something>anything</something>).' ,
arguments={
"theXMLNode": """The xml node you want to set the value of. """,
"value": """The string value you want the node to have. """,
"setCDATA": """A boolean indicating if you want the value to be enclosed inside CDATA tags. """
},
result='returns true if value was successfully set, false otherwise.' ,
),
url='xmlNodeSetValue',
)
],
client=[
FunctionData(
signature=FunctionSignature(
name='xmlNodeSetValue',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['bool'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='theXMLNode',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='value',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='setCDATA',
argument_type=FunctionType(
names=['bool'],
is_optional=True,
),
default_value='false',
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function is made to be able to assign values to tags in XML files (eg. <something>anything</something>).' ,
arguments={
"theXMLNode": """The xml node you want to set the value of. """,
"value": """The string value you want the node to have. """,
"setCDATA": """A boolean indicating if you want the value to be enclosed inside CDATA tags. """
},
result='returns true if value was successfully set, false otherwise.' ,
),
url='xmlNodeSetValue',
)
],
),
CompoundFunctionData(
server=[
FunctionData(
signature=FunctionSignature(
name='xmlSaveFile',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['bool'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='rootNode',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function saves a loaded XML file.' ,
arguments={
"rootNode": """the root xmlnode of the loaded XML file. """
},
result='returns true if save was successful, false if the xml file does not exist.' ,
),
url='xmlSaveFile',
)
],
client=[
FunctionData(
signature=FunctionSignature(
name='xmlSaveFile',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['bool'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='rootNode',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function saves a loaded XML file.' ,
arguments={
"rootNode": """the root xmlnode of the loaded XML file. """
},
result='returns true if save was successful, false if the xml file does not exist.' ,
),
url='xmlSaveFile',
)
],
),
CompoundFunctionData(
server=[
FunctionData(
signature=FunctionSignature(
name='xmlUnloadFile',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['bool'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='node',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='Unloads an XML document from memory.' ,
arguments={
"node": """root of the XML document to unload """
},
result='returns true if the document was unloaded successfully, false otherwise.' ,
),
url='xmlUnloadFile',
)
],
client=[
FunctionData(
signature=FunctionSignature(
name='xmlUnloadFile',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['bool'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='node',
argument_type=FunctionType(
names=['xmlnode'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='Unloads an XML document from memory.' ,
arguments={
"node": """root of the XML document to unload """
},
result='returns true if the document was unloaded successfully, false otherwise.' ,
),
url='xmlUnloadFile',
)
],
)
]
| 41.476625
| 275
| 0.364241
| 4,647
| 77,188
| 5.963202
| 0.060039
| 0.060121
| 0.0498
| 0.064884
| 0.992386
| 0.988488
| 0.9716
| 0.9716
| 0.9716
| 0.9716
| 0
| 0.000662
| 0.569713
| 77,188
| 1,860
| 276
| 41.498925
| 0.83368
| 0.000661
| 0
| 0.846872
| 1
| 0.025247
| 0.205821
| 0.007934
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.002195
| 0.000549
| 0
| 0.000549
| 0.001098
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
545e7deb82ced7bb3c2c52e6450bf2db12a564d5
| 7,990
|
py
|
Python
|
src/mlp_test.py
|
zhangcirun/mlp-xor
|
f851e5e8365f6b54edb7620d64f18731baad4158
|
[
"MIT"
] | 2
|
2021-01-11T18:41:37.000Z
|
2021-05-17T09:54:24.000Z
|
src/mlp_test.py
|
zhangcirun/mlp-xor
|
f851e5e8365f6b54edb7620d64f18731baad4158
|
[
"MIT"
] | 1
|
2019-08-03T16:04:30.000Z
|
2019-08-03T16:04:30.000Z
|
src/mlp_test.py
|
zhangcirun/mlp-xor
|
f851e5e8365f6b54edb7620d64f18731baad4158
|
[
"MIT"
] | null | null | null |
# Experiment driver for the XOR MLPs defined in mlp_xor: compares networks
# with 2, 4 and 8 hidden units on noisy XOR data.
import mlp_xor as mymlp

mlp1 = mymlp.MLP2Neuron()
mlp2 = mymlp.MLP4Neuron()
mlp3 = mymlp.MLP8Neuron()

# Target labels: the XOR truth-table outputs (0, 1, 1, 0) repeated 16 times.
Y_64 = mymlp.np.array([[0, 1, 1, 0] * 16])

# Test inputs: 64 noisy XOR samples.  Each row is [bias, a, b] where the bias
# input is fixed at -1 and (a, b) cycles through the four XOR input patterns
# (0,0), (0,1), (1,0), (1,1), each component drawn through its noise generator.
# The comprehension evaluates the generators in the same row-major order as
# the original hand-written 64-row literal.
_noise = (mymlp.generate_noise_for_0, mymlp.generate_noise_for_1)
test_64 = X_64 = mymlp.np.array([
    [-1, _noise[a](), _noise[b]()]
    for _ in range(16)
    for a, b in ((0, 0), (0, 1), (1, 0), (1, 1))
])

# Convergence test: train each network, then show all loss curves together.
mlp1.run_16()
mlp2.run_16()
mlp3.run_16()
mymlp.plt.legend()
mymlp.plt.show()

# Average losses test: evaluate each network on the noisy test set and on
# the module's original training data.
Y1 = mlp1.forward(test_64)
Y2 = mlp2.forward(test_64)
Y3 = mlp3.forward(test_64)
Y1_train = mlp1.forward(mymlp.X_64)
Y2_train = mlp2.forward(mymlp.X_64)
Y3_train = mlp3.forward(mymlp.X_64)
loss1 = mymlp.loss(Y_64.T, Y1)
loss2 = mymlp.loss(Y_64.T, Y2)
loss3 = mymlp.loss(Y_64.T, Y3)
loss1_train = mymlp.loss(mymlp.Y_64.T, Y1_train)
loss2_train = mymlp.loss(mymlp.Y_64.T, Y2_train)
loss3_train = mymlp.loss(mymlp.Y_64.T, Y3_train)
print("======== Test Results ========")
print("2 units: Test data: " + str(loss1) + " Training data: " + str(loss1_train))
print("4 units: Test data: " + str(loss2) + " Training data: " + str(loss2_train))
print("8 units: Test data: " + str(loss3) + " Training data: " + str(loss3_train))

# Generalisation performance test: one figure per network.
for _mlp in (mlp1, mlp2, mlp3):
    _mlp.generalisation_test()
    mymlp.plt.legend()
    mymlp.plt.show()

# Mapping function visualisation: one figure per network.
for _mlp in (mlp1, mlp2, mlp3):
    _mlp.draw_network()
    mymlp.plt.show()
| 55.103448
| 99
| 0.540175
| 991
| 7,990
| 3.930373
| 0.056509
| 0.427214
| 0.591528
| 0.690116
| 0.841849
| 0.814121
| 0.807445
| 0.78973
| 0.755841
| 0.755841
| 0
| 0.062916
| 0.323655
| 7,990
| 144
| 100
| 55.486111
| 0.657846
| 0
| 0
| 0.694444
| 0
| 0
| 0.017548
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.009259
| 0
| 0.009259
| 0.037037
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
54629b9d3db272f6e503b9c7563350ff8aab9ce3
| 1,107
|
py
|
Python
|
octicons16px/rocket.py
|
andrewp-as-is/octicons16px.py
|
1272dc9f290619d83bd881e87dbd723b0c48844c
|
[
"Unlicense"
] | 1
|
2021-01-28T06:47:39.000Z
|
2021-01-28T06:47:39.000Z
|
octicons16px/rocket.py
|
andrewp-as-is/octicons16px.py
|
1272dc9f290619d83bd881e87dbd723b0c48844c
|
[
"Unlicense"
] | null | null | null |
octicons16px/rocket.py
|
andrewp-as-is/octicons16px.py
|
1272dc9f290619d83bd881e87dbd723b0c48844c
|
[
"Unlicense"
] | null | null | null |
# 16px "rocket" icon from GitHub's Octicons set, as inline SVG markup ready
# to be embedded directly in HTML (viewBox 0 0 16 16, single filled path).
OCTICON_ROCKET = """
<svg class="octicon octicon-rocket" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16" width="16" height="16"><path fill-rule="evenodd" d="M14.064 0a8.75 8.75 0 00-6.187 2.563l-.459.458c-.314.314-.616.641-.904.979H3.31a1.75 1.75 0 00-1.49.833L.11 7.607a.75.75 0 00.418 1.11l3.102.954c.037.051.079.1.124.145l2.429 2.428c.046.046.094.088.145.125l.954 3.102a.75.75 0 001.11.418l2.774-1.707a1.75 1.75 0 00.833-1.49V9.485c.338-.288.665-.59.979-.904l.458-.459A8.75 8.75 0 0016 1.936V1.75A1.75 1.75 0 0014.25 0h-.186zM10.5 10.625c-.088.06-.177.118-.266.175l-2.35 1.521.548 1.783 1.949-1.2a.25.25 0 00.119-.213v-2.066zM3.678 8.116L5.2 5.766c.058-.09.117-.178.176-.266H3.309a.25.25 0 00-.213.119l-1.2 1.95 1.782.547zm5.26-4.493A7.25 7.25 0 0114.063 1.5h.186a.25.25 0 01.25.25v.186a7.25 7.25 0 01-2.123 5.127l-.459.458a15.21 15.21 0 01-2.499 2.02l-2.317 1.5-2.143-2.143 1.5-2.317a15.25 15.25 0 012.02-2.5l.458-.458h.002zM12 5a1 1 0 11-2 0 1 1 0 012 0zm-8.44 9.56a1.5 1.5 0 10-2.12-2.12c-.734.73-1.047 2.332-1.15 3.003a.23.23 0 00.265.265c.671-.103 2.273-.416 3.005-1.148z"></path></svg>
"""
| 221.4
| 1,080
| 0.682927
| 296
| 1,107
| 2.550676
| 0.550676
| 0.027815
| 0.02649
| 0.023841
| 0.021192
| 0
| 0
| 0
| 0
| 0
| 0
| 0.583992
| 0.085818
| 1,107
| 4
| 1,081
| 276.75
| 0.162055
| 0
| 0
| 0
| 0
| 0.333333
| 0.9783
| 0.412297
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
54a36de29d55c1b715618e924e8151b1bdd37347
| 67,973
|
py
|
Python
|
models/bert_iter_models.py
|
alibaba/Retrieval-based-Pre-training-for-Machine-Reading-Comprehension
|
b27dc55446a29a53af7fffdad8628ccb545420da
|
[
"Apache-2.0"
] | 7
|
2021-06-16T01:40:23.000Z
|
2021-12-04T02:40:35.000Z
|
models/bert_iter_models.py
|
SparkJiao/Retrieval-based-Pre-training-for-Machine-Reading-Comprehension
|
9ccad31bd0bf2216004cf729d1d511fc3e0b77c9
|
[
"Apache-2.0"
] | 1
|
2021-08-16T09:10:05.000Z
|
2021-08-25T08:44:44.000Z
|
models/bert_iter_models.py
|
SparkJiao/Retrieval-based-Pre-training-for-Machine-Reading-Comprehension
|
9ccad31bd0bf2216004cf729d1d511fc3e0b77c9
|
[
"Apache-2.0"
] | 3
|
2021-09-13T02:03:37.000Z
|
2021-10-11T18:48:21.000Z
|
import torch
from torch import nn
from transformers.modeling_bert import BertConfig, BertPreTrainedModel, BertModel, BertForMaskedLM, \
BertForQuestionAnswering, QuestionAnsweringModelOutput
from general_util.logger import get_child_logger
from general_util.mixin import LogMixin, PredictionMixin
from modules import layers
logger = get_child_logger(__name__)
class BertForMaskedLMBaseline(BertForMaskedLM, LogMixin):
    """Plain BERT masked-language-model baseline.

    Runs the underlying BERT encoder, scores every token position with the
    MLM head and, when ``labels`` are given, returns the cross-entropy loss
    (positions labelled ``-1`` are ignored).  Outside training mode it also
    accumulates MLM loss/accuracy through the ``LogMixin`` metrics.
    """
    model_prefix = 'bert_mlm_baseline'

    def __init__(self, config: BertConfig):
        super().__init__(config)
        self.config = config
        # -1 marks unmasked positions that must not contribute to the loss.
        self.loss_fct = nn.CrossEntropyLoss(ignore_index=-1)
        self.init_metric("mlm_acc", "mlm_loss")
        logger.info(self.config.to_dict())

    def forward(self, input_ids: torch.Tensor = None,
                attention_mask: torch.Tensor = None,
                token_type_ids: torch.Tensor = None,
                labels: torch.Tensor = None,
                **kwargs):
        """Return a dict with ``loss`` (when ``labels`` is given) and, in
        eval mode, ``acc`` and ``valid_num``."""
        outputs = self.bert(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids
        )
        sequence_output = outputs[0]
        prediction_scores = self.cls(sequence_output)
        output_dict = {}
        if labels is not None:
            masked_lm_loss = self.loss_fct(prediction_scores.view(-1, self.config.vocab_size), labels.view(-1))
            # Was a bare print() debug leftover -- log at debug level instead
            # of writing the loss tensor to stdout every step.
            logger.debug("masked_lm_loss=%s", masked_lm_loss)
            output_dict["loss"] = masked_lm_loss
            if not self.training:
                valid_num = (labels != -1).sum().item()
                _, mlm_pred = prediction_scores.max(dim=-1)
                mlm_acc = (mlm_pred == labels).sum().to(masked_lm_loss.dtype) / valid_num
                self.eval_metrics.update("mlm_loss", masked_lm_loss.item(), valid_num)
                self.eval_metrics.update("mlm_acc", mlm_acc.item(), valid_num)
                output_dict["acc"] = mlm_acc
                output_dict["valid_num"] = valid_num
        return output_dict
class IterBertPreTrainedConfig(BertConfig):
    """``BertConfig`` extended with the hyper-parameters used by the
    iterative BERT models in this module; ``added_configs`` lists every
    extra field name."""
    added_configs = [
        'query_dropout', 'cls_type', 'sr_query_dropout', 'lm_query_dropout', 'pos_emb_size',
        'z_step', 'num_labels', 'share_mlm_sum', 'share_ssp_sum', 'word_dropout'
    ]

    def __init__(self, query_dropout=0.1, cls_type=0,
                 sr_query_dropout=0.1, lm_query_dropout=0.1,
                 pos_emb_size=200, z_step=0, num_labels=2,
                 share_mlm_sum=False, share_ssp_sum=False,
                 word_dropout=0.0, **kwargs):
        super().__init__(**kwargs)
        # Bind every extra hyper-parameter onto the instance in one sweep,
        # in the same order the fields are declared above.
        extras = dict(
            query_dropout=query_dropout,
            cls_type=cls_type,
            sr_query_dropout=sr_query_dropout,
            lm_query_dropout=lm_query_dropout,
            pos_emb_size=pos_emb_size,
            z_step=z_step,
            num_labels=num_labels,
            share_mlm_sum=share_mlm_sum,
            share_ssp_sum=share_ssp_sum,
            word_dropout=word_dropout,
        )
        for field_name, field_value in extras.items():
            setattr(self, field_name, field_value)

    def expand_configs(self, *args):
        """Register additional config field names.

        NOTE(review): this extends the class-level ``added_configs`` list,
        which is shared by all instances -- confirm that is intentional.
        """
        self.added_configs.extend(list(args))
class IterBertModel(BertPreTrainedModel):
    """BERT encoder plus an iterative "query" attention that condenses each
    sentence of the input into a single hidden vector, starting from the
    [CLS] token representation and refining for ``z_step`` rounds."""
    config_class = IterBertPreTrainedConfig
    model_prefix = 'iter_bert'
    def __init__(self, config: IterBertPreTrainedConfig):
        super().__init__(config)
        self.config = config
        self.bert = BertModel(config)
        # Loosen layer-norm eps; presumably to avoid fp16 underflow (the
        # sibling classes below say so explicitly) -- TODO confirm.
        config.layer_norm_eps = 1e-5
        self.query = layers.MultiHeadAlignedTokenAttention(
            config,
            attn_dropout_p=config.query_dropout,
            dropout_p=config.query_dropout
        )
        self.z_step = config.z_step
        self.init_weights()
    def forward(self, input_ids, attention_mask=None, token_type_ids=None,
                sentence_index=None, sentence_mask=None, sent_word_mask=None,
                **kwargs):
        """Encode the batch with BERT, regroup token states per sentence,
        then run ``z_step`` rounds of the query attention.

        Returns:
            (hidden_sent, seq_output, sent_word_hidden) -- the per-sentence
            summary vectors, the raw BERT sequence output, and the token
            hidden states regrouped as [batch, sent_num, seq_len, hidden].
        """
        seq_output = self.bert(input_ids=input_ids,
                               attention_mask=attention_mask,
                               token_type_ids=token_type_ids)[0]
        batch, sent_num, seq_len = sent_word_mask.size()
        # Expand the per-sentence token indices across the hidden dimension
        # so gather() can pull each sentence's token states from the flat
        # sequence output in one call.
        sentence_index = sentence_index.unsqueeze(-1).expand(
            -1, -1, -1, self.config.hidden_size
        ).reshape(batch, sent_num * seq_len, self.config.hidden_size)
        sent_word_hidden = seq_output.gather(dim=1, index=sentence_index).reshape(
            batch, sent_num, seq_len, -1)
        q_vec = seq_output[:, :1]  # [CLS]
        # Iteratively refine the query vector; the first step starts from
        # the single [CLS] vector (aligned=False), later steps operate on
        # the per-sentence vectors (aligned=True).
        for _step in range(self.z_step):
            if _step == 0:
                _aligned = False
            else:
                _aligned = True
            q_vec = self.query(q_vec, sent_word_hidden, sent_word_mask, aligned=_aligned, residual=False)
            if _step == 0:
                q_vec = q_vec.squeeze(1)
        hidden_sent = q_vec
        assert hidden_sent.size() == (batch, sent_num, seq_output.size(-1))
        return hidden_sent, seq_output, sent_word_hidden
class IterBertModelForBiSR(IterBertModel, LogMixin):
    """IterBertModel with a bidirectional sentence-retrieval (SR) head.

    For every query sentence it scores each document sentence both as the
    preceding ("pre") and the following ("fol") evidence sentence, and
    trains with the sum of the two cross-entropy losses.
    """
    model_prefix = 'iter_bert_bi_sr'

    def __init__(self, config: IterBertPreTrainedConfig):
        super().__init__(config)
        self.sr_sent_sum = nn.Linear(config.hidden_size, config.hidden_size)
        self.pre_sr_pooler = layers.Pooler(config.hidden_size)
        self.pre_sr_prediction_head = nn.Linear(config.hidden_size, 1)
        self.fol_sr_pooler = layers.Pooler(config.hidden_size)
        self.fol_sr_prediction_head = nn.Linear(config.hidden_size, 1)
        self.sr_dropout = nn.Dropout(config.sr_query_dropout)
        # -1 labels are ignored in the loss.
        self.loss_fct = nn.CrossEntropyLoss(ignore_index=-1)
        self.init_weights()
        # metric
        self.init_metric("sr_acc", "sr_loss")
        logger.info(self.config.to_dict())

    def forward(self, input_ids, attention_mask=None, token_type_ids=None,
                sentence_index=None, sentence_mask=None, sent_word_mask=None,
                mlm_ids=None, true_sent_ids=None, reverse_sentence_index=None,
                answers: torch.Tensor = None, pre_answers: torch.Tensor = None, **kwargs):
        """Compute pre/fol sentence-retrieval scores; when labels are given,
        return a dict with ``loss`` and, in eval mode, ``acc``/``valid_num``
        plus updated ``LogMixin`` metrics."""
        hidden_sent, seq_output, sent_word_hidden = super().forward(
            input_ids=input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            sentence_index=sentence_index,
            sentence_mask=sentence_mask,
            sent_word_mask=sent_word_mask
        )
        batch, sent_num, word_num = sent_word_mask.size()
        query_num = answers.size(1)
        query_h = hidden_sent[:, :query_num]
        # SR: project the query-sentence vectors, pool them against every
        # document sentence, then score each sentence with the two heads.
        sr_query_h = self.sr_sent_sum(query_h)
        q_rel_d_sent_h, _ = layers.mul_sentence_sum(
            sr_query_h, sent_word_hidden, sent_word_mask
        )
        pre_sr_scores = self.pre_sr_prediction_head(
            self.sr_dropout(self.pre_sr_pooler(q_rel_d_sent_h))).squeeze(-1)
        fol_sr_scores = self.fol_sr_prediction_head(
            self.sr_dropout(self.fol_sr_pooler(q_rel_d_sent_h))).squeeze(-1)
        output_dict = {}
        if mlm_ids is not None and answers is not None and pre_answers is not None:
            # Additively mask padding sentences out of the score rows before
            # the softmax inside the cross-entropy.
            sent_mask = sentence_mask
            sent_mask = sent_mask.unsqueeze(1).expand(-1, query_num, -1)
            fol_sr_scores = fol_sr_scores + sent_mask * -10000.0
            pre_sr_scores = pre_sr_scores + sent_mask * -10000.0
            sr_loss1 = self.loss_fct(pre_sr_scores.view(batch * query_num, -1),
                                     pre_answers.view(-1))
            sr_loss2 = self.loss_fct(fol_sr_scores.view(batch * query_num, -1),
                                     answers.view(-1))
            # Was a bare print() debug leftover -- log at debug level instead
            # of writing the loss tensors to stdout every step.
            logger.debug("sr_loss1=%s sr_loss2=%s", sr_loss1, sr_loss2)
            loss = sr_loss1 + sr_loss2
            output_dict["loss"] = loss
            if not self.training:
                valid_num1 = (answers != -1).sum().item()
                valid_num2 = (pre_answers != -1).sum().item()
                valid_num = valid_num1 + valid_num2
                _, pre_pred = torch.max(pre_sr_scores, dim=-1)
                _, fol_pred = torch.max(fol_sr_scores, dim=-1)
                acc1 = (fol_pred == answers).sum()
                acc2 = (pre_pred == pre_answers).sum()
                acc = (acc1 + acc2).to(dtype=pre_sr_scores.dtype) / (valid_num * 1.0)
                output_dict["acc"] = acc
                output_dict["valid_num"] = valid_num
                self.eval_metrics.update("sr_acc", acc.item(), valid_num)
                self.eval_metrics.update("sr_loss", loss.item(), valid_num)
        return output_dict
class IterBertModelForBiSRAndMLM(IterBertModel, LogMixin):
    """IterBertModel trained jointly on bidirectional sentence retrieval
    (pre/fol evidence-sentence scoring) and masked language modelling; the
    total loss is the sum of the three cross-entropy terms.
    """
    model_prefix = 'iter_bert_bi_sr_mlm'

    def __init__(self, config: IterBertPreTrainedConfig):
        super().__init__(config)
        self.sr_sent_sum = nn.Linear(config.hidden_size, config.hidden_size)
        self.lm_sent_sum = nn.Linear(config.hidden_size, config.hidden_size)
        # Tie the MLM prediction head to the input word embeddings.
        word_embedding_weight = self.bert.get_input_embeddings().weight
        self.vocab_size = word_embedding_weight.size(0)
        config.layer_norm_eps = 1e-5  # avoid fp16 underflow
        self.lm_prediction_head = layers.MaskedLMPredictionHead(config, word_embedding_weight)
        self.pre_sr_pooler = layers.Pooler(config.hidden_size)
        self.pre_sr_prediction_head = nn.Linear(config.hidden_size, 1)
        self.fol_sr_pooler = layers.Pooler(config.hidden_size)
        self.fol_sr_prediction_head = nn.Linear(config.hidden_size, 1)
        self.sr_dropout = nn.Dropout(config.sr_query_dropout)
        self.lm_dropout = nn.Dropout(config.lm_query_dropout)
        # -1 labels are ignored in the loss.
        self.loss_fct = nn.CrossEntropyLoss(ignore_index=-1)
        self.init_weights()
        # metric
        self.init_metric("sr_acc", "sr_loss", "mlm_loss", "mlm_acc")
        logger.info(self.config.to_dict())

    def forward(self, input_ids, attention_mask=None, token_type_ids=None,
                sentence_index=None, sentence_mask=None, sent_word_mask=None,
                mlm_ids=None, true_sent_ids=None, reverse_sentence_index=None,
                answers: torch.Tensor = None, pre_answers: torch.Tensor = None, **kwargs):
        """Compute SR scores and MLM scores; when all labels are given,
        return a dict with the summed ``loss`` and, in eval mode, ``acc``/
        ``valid_num`` plus updated ``LogMixin`` metrics."""
        hidden_sent, seq_output, sent_word_hidden = super().forward(
            input_ids=input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            sentence_index=sentence_index,
            sentence_mask=sentence_mask,
            sent_word_mask=sent_word_mask
        )
        batch, sent_num, word_num = sent_word_mask.size()
        query_num = answers.size(1)
        query_h = hidden_sent[:, :query_num]
        attention_mask = attention_mask.to(query_h.dtype)
        # SR: project the query-sentence vectors, pool them against every
        # document sentence, then score each sentence with the two heads.
        sr_query_h = self.sr_sent_sum(query_h)
        q_rel_d_sent_h, _ = layers.mul_sentence_sum(
            sr_query_h, sent_word_hidden, sent_word_mask
        )
        pre_sr_scores = self.pre_sr_prediction_head(
            self.sr_dropout(self.pre_sr_pooler(q_rel_d_sent_h))).squeeze(-1)
        fol_sr_scores = self.fol_sr_prediction_head(
            self.sr_dropout(self.fol_sr_pooler(q_rel_d_sent_h))).squeeze(-1)
        # MLM: attend query-sentence summaries over the whole sequence,
        # realign per query token, and score the concatenated states.
        lm_query_h = self.lm_sent_sum(query_h)
        query_token_num = mlm_ids.size(1)
        q_rel_d_h, _ = layers.mul_weighted_sum(
            lm_query_h, seq_output, 1 - attention_mask
        )
        q_rel_d_h = self.lm_dropout(q_rel_d_h)
        aligned_sent_hidden = q_rel_d_h.gather(
            dim=1,
            index=reverse_sentence_index.unsqueeze(-1).expand(-1, -1, seq_output.size(-1))
        )
        concat_word_hidden = torch.cat([seq_output[:, :query_token_num], aligned_sent_hidden], dim=-1)
        mlm_scores = self.lm_prediction_head(concat_word_hidden)
        output_dict = {}
        if mlm_ids is not None and answers is not None and pre_answers is not None:
            # Additively mask padding sentences out of the score rows before
            # the softmax inside the cross-entropy.
            sent_mask = sentence_mask
            sent_mask = sent_mask.unsqueeze(1).expand(-1, query_num, -1)
            fol_sr_scores = fol_sr_scores + sent_mask * -10000.0
            pre_sr_scores = pre_sr_scores + sent_mask * -10000.0
            sr_loss1 = self.loss_fct(pre_sr_scores.view(batch * query_num, -1),
                                     pre_answers.view(-1))
            sr_loss2 = self.loss_fct(fol_sr_scores.view(batch * query_num, -1),
                                     answers.view(-1))
            mlm_loss = self.loss_fct(mlm_scores.view(-1, self.config.vocab_size),
                                     mlm_ids.view(-1))
            # Was a bare print() debug leftover -- log at debug level instead
            # of writing the loss tensors to stdout every step.
            logger.debug("sr_loss1=%s sr_loss2=%s mlm_loss=%s", sr_loss1, sr_loss2, mlm_loss)
            loss = sr_loss1 + sr_loss2 + mlm_loss
            output_dict["loss"] = loss
            if not self.training:
                _, mlm_pred = mlm_scores.max(dim=-1)
                mlm_valid_num = (mlm_ids != -1).sum().item()
                mlm_acc = (mlm_pred == mlm_ids).sum().to(loss.dtype).item() / mlm_valid_num
                self.eval_metrics.update("mlm_loss", mlm_loss.item(), mlm_valid_num)
                self.eval_metrics.update("mlm_acc", mlm_acc, mlm_valid_num)
                valid_num1 = (answers != -1).sum().item()
                valid_num2 = (pre_answers != -1).sum().item()
                valid_num = valid_num1 + valid_num2
                _, pre_pred = torch.max(pre_sr_scores, dim=-1)
                _, fol_pred = torch.max(fol_sr_scores, dim=-1)
                acc1 = (fol_pred == answers).sum()
                acc2 = (pre_pred == pre_answers).sum()
                acc = (acc1 + acc2).to(dtype=pre_sr_scores.dtype) / (valid_num * 1.0)
                output_dict["acc"] = acc
                output_dict["valid_num"] = valid_num
                self.eval_metrics.update("sr_acc", acc.item(), valid_num)
                self.eval_metrics.update("sr_loss", loss.item(), valid_num)
        return output_dict
class IterBertModelForSRAndMLM(IterBertModel, LogMixin):
    """Joint pre-training head: sentence reordering (SR) + masked LM (MLM).

    SR scores every document sentence against every query sentence from a
    shared score matrix; the "following" and "preceding" gold sentences are
    each supervised after masking the other gold label out of the matrix.
    MLM predicts masked tokens from the concatenation of the token hidden
    state and a query-aligned sentence summary.

    Fix vs. original: removed a leftover ``print(...)`` of the per-step
    losses, which spammed stdout and forced a GPU->CPU sync every step.
    """
    model_prefix = 'iter_bert_sr_mlm'

    def __init__(self, config: IterBertPreTrainedConfig):
        super().__init__(config)
        self.sr_sent_sum = nn.Linear(config.hidden_size, config.hidden_size)
        self.lm_sent_sum = nn.Linear(config.hidden_size, config.hidden_size)
        # Tie the MLM output projection to the input word embeddings.
        word_embedding_weight = self.bert.get_input_embeddings().weight
        self.vocab_size = word_embedding_weight.size(0)
        config.layer_norm_eps = 1e-5  # avoid fp16 underflow
        self.lm_prediction_head = layers.MaskedLMPredictionHead(config, word_embedding_weight)
        self.sr_pooler = layers.Pooler(config.hidden_size)
        self.sr_prediction_head = nn.Linear(config.hidden_size, 1)
        self.sr_dropout = nn.Dropout(config.sr_query_dropout)
        self.lm_dropout = nn.Dropout(config.lm_query_dropout)
        self.loss_fct = nn.CrossEntropyLoss(ignore_index=-1)
        self.init_weights()
        # metric
        self.init_metric("sr_acc", "sr_loss", "mlm_loss", "mlm_acc")
        logger.info(self.config.to_dict())

    def forward(self, input_ids, attention_mask=None, token_type_ids=None,
                sentence_index=None, sentence_mask=None, sent_word_mask=None,
                mlm_ids=None, true_sent_ids=None, reverse_sentence_index=None,
                answers: torch.Tensor = None, pre_answers: torch.Tensor = None, **kwargs):
        """Return a dict with "loss" and, in eval mode, "acc"/"valid_num".

        NOTE(review): ``mlm_ids.size(1)`` and ``answers.size(1)`` are read
        unconditionally below, so the later ``is not None`` guard is dead —
        all three label tensors are effectively required; confirm callers.
        """
        hidden_sent, seq_output, sent_word_hidden = super().forward(
            input_ids=input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            sentence_index=sentence_index,
            sentence_mask=sentence_mask,
            sent_word_mask=sent_word_mask
        )
        batch, sent_num, word_num = sent_word_mask.size()
        query_num = answers.size(1)
        # Leading ``query_num`` sentence states act as the SR/MLM queries.
        query_h = hidden_sent[:, :query_num]
        attention_mask = attention_mask.to(query_h.dtype)
        # SR: project queries, summarize each sentence against them, score.
        sr_query_h = self.sr_sent_sum(query_h)
        q_rel_d_sent_h, _ = layers.mul_sentence_sum(
            sr_query_h, sent_word_hidden, sent_word_mask
        )
        sr_scores = self.sr_prediction_head(
            self.sr_dropout(self.sr_pooler(q_rel_d_sent_h))
        ).squeeze(-1)
        # MLM: build a query-aligned sequence summary per token position.
        lm_query_h = self.lm_sent_sum(query_h)
        query_token_num = mlm_ids.size(1)
        q_rel_d_h, _ = layers.mul_weighted_sum(
            lm_query_h, seq_output, 1 - attention_mask
        )
        q_rel_d_h = self.lm_dropout(q_rel_d_h)
        # Re-align sentence summaries back onto token positions.
        aligned_sent_hidden = q_rel_d_h.gather(
            dim=1,
            index=reverse_sentence_index.unsqueeze(-1).expand(-1, -1, seq_output.size(-1))
        )
        concat_word_hidden = torch.cat([seq_output[:, :query_token_num], aligned_sent_hidden], dim=-1)
        mlm_scores = self.lm_prediction_head(concat_word_hidden)
        output_dict = {}
        if mlm_ids is not None and answers is not None and pre_answers is not None:
            # Additive -10000 masking removes padded sentences from softmax.
            sent_mask = sentence_mask
            sent_mask = sent_mask.unsqueeze(1).expand(-1, query_num, -1)
            sr_scores = sr_scores + sent_mask * -10000.0
            # Mask the "following" gold out before supervising "preceding",
            # and vice versa, so the two targets do not compete.
            fol_masked_scores = layers.mask_scores_with_labels(sr_scores, answers).contiguous()
            sr_loss1 = self.loss_fct(fol_masked_scores.view(batch * query_num, -1),
                                     pre_answers.view(-1))
            pre_masked_scores = layers.mask_scores_with_labels(sr_scores, pre_answers).contiguous()
            sr_loss2 = self.loss_fct(pre_masked_scores.view(batch * query_num, -1),
                                     answers.view(-1))
            mlm_loss = self.loss_fct(mlm_scores.view(-1, self.config.vocab_size),
                                     mlm_ids.view(-1))
            loss = sr_loss1 + sr_loss2 + mlm_loss
            output_dict["loss"] = loss
            if not self.training:
                _, mlm_pred = mlm_scores.max(dim=-1)
                # ignore_index is -1, so -1 marks unsupervised positions.
                mlm_valid_num = (mlm_ids != -1).sum().item()
                mlm_acc = (mlm_pred == mlm_ids).sum().to(loss.dtype).item() / mlm_valid_num
                self.eval_metrics.update("mlm_loss", mlm_loss.item(), mlm_valid_num)
                self.eval_metrics.update("mlm_acc", mlm_acc, mlm_valid_num)
                valid_num1 = (answers != -1).sum().item()
                valid_num2 = (pre_answers != -1).sum().item()
                valid_num = valid_num1 + valid_num2
                # Top-2 predictions cover both gold sentences at once.
                _, pred = torch.topk(sr_scores, k=2, dim=-1, largest=True)
                acc1 = (pred == answers.unsqueeze(-1)).sum()
                acc2 = (pred == pre_answers.unsqueeze(-1)).sum()
                acc = (acc1 + acc2).to(dtype=sr_scores.dtype) / (valid_num * 1.0)
                output_dict["acc"] = acc
                output_dict["valid_num"] = valid_num
                self.eval_metrics.update("sr_acc", acc.item(), valid_num)
                self.eval_metrics.update("sr_loss", loss.item(), valid_num)
        return output_dict
class IterBertModelForSR(IterBertModel, LogMixin):
    """Sentence-reordering-only pre-training head.

    Scores every document sentence against every query sentence; the
    "following" and "preceding" gold sentences are each supervised after
    masking the other gold label out of the shared score matrix.

    Fix vs. original: removed a leftover ``print(sr_loss1, sr_loss2)``
    that spammed stdout and forced a GPU->CPU sync every training step.
    """
    model_prefix = 'iter_bert_sr'

    def __init__(self, config: IterBertPreTrainedConfig):
        super().__init__(config)
        self.sr_sent_sum = nn.Linear(config.hidden_size, config.hidden_size)
        self.sr_pooler = layers.Pooler(config.hidden_size)
        self.sr_prediction_head = nn.Linear(config.hidden_size, 1)
        self.sr_dropout = nn.Dropout(config.sr_query_dropout)
        self.loss_fct = nn.CrossEntropyLoss(ignore_index=-1)
        self.init_weights()
        # metric
        self.init_metric("sr_acc", "sr_loss")
        logger.info(self.config.to_dict())

    def forward(self, input_ids, attention_mask=None, token_type_ids=None,
                sentence_index=None, sentence_mask=None, sent_word_mask=None,
                mlm_ids=None, true_sent_ids=None, reverse_sentence_index=None,
                answers: torch.Tensor = None, pre_answers: torch.Tensor = None, **kwargs):
        """Return a dict with "loss" and, in eval mode, "acc"/"valid_num"."""
        hidden_sent, seq_output, sent_word_hidden = super().forward(
            input_ids=input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            sentence_index=sentence_index,
            sentence_mask=sentence_mask,
            sent_word_mask=sent_word_mask
        )
        batch, sent_num, word_num = sent_word_mask.size()
        query_num = answers.size(1)
        # Leading ``query_num`` sentence states act as the SR queries.
        query_h = hidden_sent[:, :query_num]
        attention_mask = attention_mask.to(query_h.dtype)
        # SR: project queries, summarize each sentence against them, score.
        sr_query_h = self.sr_sent_sum(query_h)
        q_rel_d_sent_h, _ = layers.mul_sentence_sum(
            sr_query_h, sent_word_hidden, sent_word_mask
        )
        sr_scores = self.sr_prediction_head(
            self.sr_dropout(self.sr_pooler(q_rel_d_sent_h))
        ).squeeze(-1)
        output_dict = {}
        if answers is not None and pre_answers is not None:
            # Additive -10000 masking removes padded sentences from softmax.
            sent_mask = sentence_mask
            sent_mask = sent_mask.unsqueeze(1).expand(-1, query_num, -1)
            sr_scores = sr_scores + sent_mask * -10000.0
            # Mask the "following" gold out before supervising "preceding",
            # and vice versa, so the two targets do not compete.
            fol_masked_scores = layers.mask_scores_with_labels(sr_scores, answers).contiguous()
            sr_loss1 = self.loss_fct(fol_masked_scores.view(batch * query_num, -1),
                                     pre_answers.view(-1))
            pre_masked_scores = layers.mask_scores_with_labels(sr_scores, pre_answers).contiguous()
            sr_loss2 = self.loss_fct(pre_masked_scores.view(batch * query_num, -1),
                                     answers.view(-1))
            loss = sr_loss1 + sr_loss2
            output_dict["loss"] = loss
            if not self.training:
                # ignore_index is -1, so -1 marks unsupervised queries.
                valid_num1 = (answers != -1).sum().item()
                valid_num2 = (pre_answers != -1).sum().item()
                valid_num = valid_num1 + valid_num2
                # Top-2 predictions cover both gold sentences at once.
                _, pred = torch.topk(sr_scores, k=2, dim=-1, largest=True)
                acc1 = (pred == answers.unsqueeze(-1)).sum()
                acc2 = (pred == pre_answers.unsqueeze(-1)).sum()
                acc = (acc1 + acc2).to(dtype=sr_scores.dtype) / (valid_num * 1.0)
                output_dict["acc"] = acc
                output_dict["valid_num"] = valid_num
                self.eval_metrics.update("sr_acc", acc.item(), valid_num)
                self.eval_metrics.update("sr_loss", loss.item(), valid_num)
        return output_dict
class IterBertModelForMLM(IterBertModel, LogMixin):
    """Masked-LM-only pre-training head.

    Masked tokens are predicted from the concatenation of the token hidden
    state and a query-aligned sentence summary gathered back onto token
    positions via ``reverse_sentence_index``.

    Fix vs. original: removed a leftover ``print(mlm_loss)`` that spammed
    stdout and forced a GPU->CPU sync every training step.
    """
    model_prefix = 'iter_bert_mlm'

    def __init__(self, config: IterBertPreTrainedConfig):
        super().__init__(config)
        self.lm_sent_sum = nn.Linear(config.hidden_size, config.hidden_size)
        # Tie the MLM output projection to the input word embeddings.
        word_embedding_weight = self.bert.get_input_embeddings().weight
        self.vocab_size = word_embedding_weight.size(0)
        config.layer_norm_eps = 1e-5  # avoid fp16 underflow
        self.lm_prediction_head = layers.MaskedLMPredictionHead(config, word_embedding_weight)
        self.lm_dropout = nn.Dropout(config.lm_query_dropout)
        self.loss_fct = nn.CrossEntropyLoss(ignore_index=-1)
        self.init_weights()
        # metric
        self.init_metric("mlm_loss", "mlm_acc")
        logger.info(self.config.to_dict())

    def forward(self, input_ids, attention_mask=None, token_type_ids=None,
                sentence_index=None, sentence_mask=None, sent_word_mask=None,
                mlm_ids: torch.Tensor = None, true_sent_ids=None, reverse_sentence_index=None,
                answers: torch.Tensor = None, pre_answers: torch.Tensor = None, **kwargs):
        """Return a dict with "loss" and, in eval mode, "acc"/"valid_num".

        NOTE(review): ``mlm_ids.size(1)`` and ``answers.size(1)`` are read
        unconditionally, so the later ``mlm_ids is not None`` guard is dead
        and both tensors are effectively required; confirm callers.
        """
        hidden_sent, seq_output, sent_word_hidden = super().forward(
            input_ids=input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            sentence_index=sentence_index,
            sentence_mask=sentence_mask,
            sent_word_mask=sent_word_mask
        )
        batch, sent_num, word_num = sent_word_mask.size()
        query_num = answers.size(1)
        # Leading ``query_num`` sentence states act as the LM queries.
        query_h = hidden_sent[:, :query_num]
        attention_mask = attention_mask.to(query_h.dtype)
        # MLM: build a query-aligned sequence summary per token position.
        lm_query_h = self.lm_sent_sum(query_h)
        query_token_num = mlm_ids.size(1)
        q_rel_d_h, _ = layers.mul_weighted_sum(
            lm_query_h, seq_output, 1 - attention_mask
        )
        q_rel_d_h = self.lm_dropout(q_rel_d_h)
        # Re-align sentence summaries back onto token positions.
        aligned_sent_hidden = q_rel_d_h.gather(
            dim=1,
            index=reverse_sentence_index.unsqueeze(-1).expand(-1, -1, seq_output.size(-1))
        )
        concat_word_hidden = torch.cat([seq_output[:, :query_token_num], aligned_sent_hidden], dim=-1)
        mlm_scores = self.lm_prediction_head(concat_word_hidden)
        output_dict = {}
        if mlm_ids is not None:
            mlm_loss = self.loss_fct(mlm_scores.view(-1, self.config.vocab_size),
                                     mlm_ids.view(-1))
            loss = mlm_loss
            output_dict["loss"] = loss
            if not self.training:
                _, mlm_pred = mlm_scores.max(dim=-1)
                # ignore_index is -1, so -1 marks unsupervised positions.
                mlm_valid_num = (mlm_ids != -1).sum().item()
                mlm_acc = (mlm_pred == mlm_ids).sum().to(loss.dtype) / mlm_valid_num
                self.eval_metrics.update("mlm_loss", mlm_loss.item(), mlm_valid_num)
                self.eval_metrics.update("mlm_acc", mlm_acc.item(), mlm_valid_num)
                output_dict["acc"] = mlm_acc
                output_dict["valid_num"] = mlm_valid_num
        return output_dict
class IterBertModelForMCRC(IterBertModel):
    """Multi-choice reading-comprehension head over IterBertModel.

    Each (question, option) candidate is encoded jointly with the passage.
    A shared query projection (``sent_sum``) attends first over each passage
    sentence's words, then over the resulting sentence vectors; the pooled
    concatenation is scored with a single-logit classifier per choice.
    """
    model_prefix = 'iter_bert_mcrc'

    def __init__(self, config: IterBertPreTrainedConfig):
        super().__init__(config)
        # Query projection used for both word- and sentence-level summaries.
        self.sent_sum = nn.Linear(config.hidden_size, config.hidden_size)
        if config.share_ssp_sum:
            # Alias so both attribute names point at the same layer
            # (presumably so pre-trained SR weights load here — TODO confirm).
            self.sr_sent_sum = nn.Linear(config.hidden_size, config.hidden_size)
            self.sent_sum = self.sr_sent_sum
        if config.word_dropout > 0:
            self.word_dropout = nn.Dropout(config.word_dropout)
        else:
            # Identity when word-level dropout is disabled.
            self.word_dropout = lambda x: x
        if config.cls_type == 1:
            self.pooler = nn.Sequential(
                nn.Linear(config.hidden_size * 2, config.hidden_size),
                nn.Tanh()
            )
        else:
            self.pooler = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.classifier = nn.Linear(config.hidden_size, 1)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.loss_fct = nn.CrossEntropyLoss(ignore_index=-1)
        self.init_weights()

    @staticmethod
    def fold_tensor(x):
        # Collapse (batch, num_choice, ...) -> (batch * num_choice, ...).
        if x is None:
            return None
        return x.reshape(x.size(0) * x.size(1), *x.size()[2:])

    def forward(self, input_ids, attention_mask=None, token_type_ids=None,
                sentence_index=None, sentence_mask=None, sent_word_mask=None,
                labels=None, **kwargs):
        """Score every choice; returns (logits,) or (loss, logits, acc)."""
        batch, num_choice, _ = input_ids.size()
        # Fold the choice dimension into the batch for one BERT pass.
        input_ids = self.fold_tensor(input_ids)
        token_type_ids = self.fold_tensor(token_type_ids)
        attention_mask = self.fold_tensor(attention_mask)
        sentence_index = self.fold_tensor(sentence_index)
        sent_word_mask = self.fold_tensor(sent_word_mask)
        sentence_mask = self.fold_tensor(sentence_mask)
        seq_output = self.bert(input_ids=input_ids,
                               attention_mask=attention_mask,
                               token_type_ids=token_type_ids)[0]
        fb, sent_num, seq_len = sent_word_mask.size()
        # Expand per-sentence word indices so token states can be gathered
        # into a (fb, sent_num, seq_len, hidden) layout.
        sentence_index = sentence_index.unsqueeze(-1).expand(
            -1, -1, -1, self.config.hidden_size
        ).reshape(fb, sent_num * seq_len, self.config.hidden_size)
        cls_h = seq_output[:, :1]
        sent_word_hidden = seq_output.gather(dim=1, index=sentence_index).reshape(
            fb, sent_num, seq_len, -1)
        # NOTE(review): masks appear to be 1 at padding positions — the
        # ``1 - mask`` factor zeroes them out; confirm the convention.
        sent_word_hidden = sent_word_hidden * (1 - sent_word_mask.unsqueeze(-1))
        # Sentences 0-1 hold the question + option; the rest is the passage.
        q_op_word_hidden = sent_word_hidden[:, :2].reshape(fb, 1, 2 * seq_len, -1)
        q_op_word_mask = sent_word_mask[:, :2].reshape(fb, 1, 2 * seq_len)
        q_op_hidden_sent = self.query(cls_h, q_op_word_hidden, q_op_word_mask,
                                      aligned=False, residual=False).view(fb, seq_output.size(-1))
        # =====================================
        q_op_query = self.sent_sum(q_op_hidden_sent)
        # Word-level summary of each passage sentence, with word dropout.
        p_hidden_sent, _ = layers.sentence_sum(
            q=q_op_query,
            kv=sent_word_hidden[:, 2:],
            mask=sent_word_mask[:, 2:],
            _dropout=self.word_dropout
        )
        p_hidden_sent = p_hidden_sent * (1 - sentence_mask[:, 2:].unsqueeze(-1))
        # Document-level summary over sentence vectors with the same query.
        attended_h, _ = layers.weighted_sum(q_op_query, p_hidden_sent, sentence_mask[:, 2:])
        cls_input = torch.cat([q_op_hidden_sent, attended_h], dim=-1)
        logits = self.classifier(self.dropout(self.pooler(cls_input))).view(batch, num_choice)
        outputs = (logits,)
        if labels is not None:
            loss = self.loss_fct(logits, labels)
            outputs = (loss,) + outputs
            _, pred = logits.max(dim=-1)
            acc = torch.sum(pred == labels) / (1.0 * batch)
            outputs = outputs + (acc,)
        return outputs
class IterBertModelForMCRCDropout(IterBertModel):
    """Multi-choice RC head variant with word dropout applied to attention
    keys only.

    Identical pipeline to the base MCRC head except that ``sentence_sum``
    receives word-dropped hidden states as keys (``kv``) while the clean
    hidden states are passed separately as values (``v``).
    """
    model_prefix = 'iter_bert_mcrc_d'

    def __init__(self, config: IterBertPreTrainedConfig):
        super().__init__(config)
        # Query projection used for both word- and sentence-level summaries.
        self.sent_sum = nn.Linear(config.hidden_size, config.hidden_size)
        if config.share_ssp_sum:
            # Alias so both attribute names point at the same layer
            # (presumably so pre-trained SR weights load here — TODO confirm).
            self.sr_sent_sum = nn.Linear(config.hidden_size, config.hidden_size)
            self.sent_sum = self.sr_sent_sum
        if config.word_dropout > 0:
            self.word_dropout = nn.Dropout(config.word_dropout)
        else:
            # Identity when word-level dropout is disabled.
            self.word_dropout = lambda x: x
        if config.cls_type == 1:
            self.pooler = nn.Sequential(
                nn.Linear(config.hidden_size * 2, config.hidden_size),
                nn.Tanh()
            )
        else:
            self.pooler = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.classifier = nn.Linear(config.hidden_size, 1)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.loss_fct = nn.CrossEntropyLoss(ignore_index=-1)
        self.init_weights()

    @staticmethod
    def fold_tensor(x):
        # Collapse (batch, num_choice, ...) -> (batch * num_choice, ...).
        if x is None:
            return None
        return x.reshape(x.size(0) * x.size(1), *x.size()[2:])

    def forward(self, input_ids, attention_mask=None, token_type_ids=None,
                sentence_index=None, sentence_mask=None, sent_word_mask=None,
                labels=None, **kwargs):
        """Score every choice; returns (logits,) or (loss, logits, acc)."""
        batch, num_choice, _ = input_ids.size()
        # Fold the choice dimension into the batch for one BERT pass.
        input_ids = self.fold_tensor(input_ids)
        token_type_ids = self.fold_tensor(token_type_ids)
        attention_mask = self.fold_tensor(attention_mask)
        sentence_index = self.fold_tensor(sentence_index)
        sent_word_mask = self.fold_tensor(sent_word_mask)
        sentence_mask = self.fold_tensor(sentence_mask)
        seq_output = self.bert(input_ids=input_ids,
                               attention_mask=attention_mask,
                               token_type_ids=token_type_ids)[0]
        fb, sent_num, seq_len = sent_word_mask.size()
        # Expand per-sentence word indices so token states can be gathered
        # into a (fb, sent_num, seq_len, hidden) layout.
        sentence_index = sentence_index.unsqueeze(-1).expand(
            -1, -1, -1, self.config.hidden_size
        ).reshape(fb, sent_num * seq_len, self.config.hidden_size)
        cls_h = seq_output[:, :1]
        sent_word_hidden = seq_output.gather(dim=1, index=sentence_index).reshape(
            fb, sent_num, seq_len, -1)
        # NOTE(review): masks appear to be 1 at padding positions — the
        # ``1 - mask`` factor zeroes them out; confirm the convention.
        sent_word_hidden = sent_word_hidden * (1 - sent_word_mask.unsqueeze(-1))
        # Sentences 0-1 hold the question + option; the rest is the passage.
        q_op_word_hidden = sent_word_hidden[:, :2].reshape(fb, 1, 2 * seq_len, -1)
        q_op_word_mask = sent_word_mask[:, :2].reshape(fb, 1, 2 * seq_len)
        q_op_hidden_sent = self.query(cls_h, q_op_word_hidden, q_op_word_mask,
                                      aligned=False, residual=False).view(fb, seq_output.size(-1))
        # =====================================
        q_op_query = self.sent_sum(q_op_hidden_sent)
        # Keys are word-dropped; values stay clean so the summary mixes
        # unperturbed hidden states.
        p_hidden_sent, _ = layers.sentence_sum(
            q=q_op_query,
            kv=self.word_dropout(sent_word_hidden[:, 2:]),
            mask=sent_word_mask[:, 2:],
            v=sent_word_hidden[:, 2:]
        )
        p_hidden_sent = p_hidden_sent * (1 - sentence_mask[:, 2:].unsqueeze(-1))
        # Document-level summary over sentence vectors with the same query.
        attended_h, _ = layers.weighted_sum(q_op_query, p_hidden_sent, sentence_mask[:, 2:])
        cls_input = torch.cat([q_op_hidden_sent, attended_h], dim=-1)
        logits = self.classifier(self.dropout(self.pooler(cls_input))).view(batch, num_choice)
        outputs = (logits,)
        if labels is not None:
            loss = self.loss_fct(logits, labels)
            outputs = (loss,) + outputs
            _, pred = logits.max(dim=-1)
            acc = torch.sum(pred == labels) / (1.0 * batch)
            outputs = outputs + (acc,)
        return outputs
class IterBertModelForMCRC2(IterBertModel):
    """Multi-choice RC head with separate query projections for the
    sentence-level (``sent_sum``) and document-level (``doc_sum``)
    attention summaries."""
    model_prefix = 'iter_bert_mcrc2'

    def __init__(self, config: IterBertPreTrainedConfig):
        super().__init__(config)
        # Module creation order is kept stable for state-dict layout.
        self.sent_sum = nn.Linear(config.hidden_size, config.hidden_size)
        if config.share_ssp_sum:
            # Alias: both names refer to one shared projection layer.
            self.sr_sent_sum = nn.Linear(config.hidden_size, config.hidden_size)
            self.sent_sum = self.sr_sent_sum
        self.doc_sum = nn.Linear(config.hidden_size, config.hidden_size)
        if config.cls_type == 1:
            self.pooler = nn.Sequential(
                nn.Linear(config.hidden_size * 2, config.hidden_size),
                nn.Tanh()
            )
        else:
            self.pooler = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.classifier = nn.Linear(config.hidden_size, 1)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.loss_fct = nn.CrossEntropyLoss(ignore_index=-1)
        self.init_weights()

    @staticmethod
    def fold_tensor(x):
        # Collapse (batch, num_choice, ...) -> (batch * num_choice, ...).
        return None if x is None else x.reshape(x.size(0) * x.size(1), *x.size()[2:])

    def forward(self, input_ids, attention_mask=None, token_type_ids=None,
                sentence_index=None, sentence_mask=None, sent_word_mask=None,
                labels=None, **kwargs):
        """Score every choice; returns (logits,) or (loss, logits, acc)."""
        n_batch, n_choice, _ = input_ids.size()
        # Fold the choice dimension into the batch for one BERT pass.
        (input_ids, token_type_ids, attention_mask,
         sentence_index, sent_word_mask, sentence_mask) = [
            self.fold_tensor(t) for t in (
                input_ids, token_type_ids, attention_mask,
                sentence_index, sent_word_mask, sentence_mask)
        ]
        seq_output = self.bert(input_ids=input_ids,
                               attention_mask=attention_mask,
                               token_type_ids=token_type_ids)[0]
        flat_b, n_sent, max_len = sent_word_mask.size()
        hidden = self.config.hidden_size
        # Gather token states into a (flat_b, n_sent, max_len, hidden) grid.
        gather_index = sentence_index.unsqueeze(-1).expand(
            -1, -1, -1, hidden).reshape(flat_b, n_sent * max_len, hidden)
        cls_h = seq_output[:, :1]
        sent_word_hidden = seq_output.gather(dim=1, index=gather_index).reshape(
            flat_b, n_sent, max_len, -1)
        sent_word_hidden = sent_word_hidden * (1 - sent_word_mask.unsqueeze(-1))
        # Sentences 0-1 hold the question + option; the rest is the passage.
        qo_word_hidden = sent_word_hidden[:, :2].reshape(flat_b, 1, 2 * max_len, -1)
        qo_word_mask = sent_word_mask[:, :2].reshape(flat_b, 1, 2 * max_len)
        qo_hidden = self.query(cls_h, qo_word_hidden, qo_word_mask,
                               aligned=False, residual=False).view(flat_b, seq_output.size(-1))
        # =====================================
        # Word-level summary of each passage sentence.
        sent_query = self.sent_sum(qo_hidden)
        passage_sent_h, _ = layers.sentence_sum(
            q=sent_query,
            kv=sent_word_hidden[:, 2:],
            mask=sent_word_mask[:, 2:]
        )
        passage_sent_h = passage_sent_h * (1 - sentence_mask[:, 2:].unsqueeze(-1))
        # Document-level summary uses its own query projection.
        doc_query = self.doc_sum(qo_hidden)
        doc_h, _ = layers.weighted_sum(
            q=doc_query,
            kv=passage_sent_h,
            mask=sentence_mask[:, 2:]
        )
        pooled = self.pooler(torch.cat([qo_hidden, doc_h], dim=-1))
        logits = self.classifier(self.dropout(pooled)).view(n_batch, n_choice)
        outputs = (logits,)
        if labels is not None:
            loss = self.loss_fct(logits, labels)
            _, pred = logits.max(dim=-1)
            acc = torch.sum(pred == labels) / (1.0 * n_batch)
            outputs = (loss,) + outputs + (acc,)
        return outputs
class IterBertModelForMCRC3(IterBertModel):
    """Multi-choice RC head with separate query/key projections at both the
    sentence level (``sen_sum_q``/``sen_sum_k``) and the document level
    (``doc_sum_q``/``doc_sum_k``).
    """
    model_prefix = 'iter_bert_mcrc3'

    def __init__(self, config: IterBertPreTrainedConfig):
        super().__init__(config)
        # Independent query/key projections for each attention level.
        self.sen_sum_q = nn.Linear(config.hidden_size, config.hidden_size)
        self.sen_sum_k = nn.Linear(config.hidden_size, config.hidden_size)
        self.doc_sum_q = nn.Linear(config.hidden_size, config.hidden_size)
        self.doc_sum_k = nn.Linear(config.hidden_size, config.hidden_size)
        if config.cls_type == 1:
            self.pooler = nn.Sequential(
                nn.Linear(config.hidden_size * 2, config.hidden_size),
                nn.Tanh()
            )
        else:
            self.pooler = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.classifier = nn.Linear(config.hidden_size, 1)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.loss_fct = nn.CrossEntropyLoss(ignore_index=-1)
        self.init_weights()

    @staticmethod
    def fold_tensor(x):
        # Collapse (batch, num_choice, ...) -> (batch * num_choice, ...).
        if x is None:
            return None
        return x.reshape(x.size(0) * x.size(1), *x.size()[2:])

    def forward(self, input_ids, attention_mask=None, token_type_ids=None,
                sentence_index=None, sentence_mask=None, sent_word_mask=None,
                labels=None, **kwargs):
        """Score every choice; returns (logits,) or (loss, logits, acc)."""
        batch, num_choice, _ = input_ids.size()
        # Fold the choice dimension into the batch for one BERT pass.
        input_ids = self.fold_tensor(input_ids)
        token_type_ids = self.fold_tensor(token_type_ids)
        attention_mask = self.fold_tensor(attention_mask)
        sentence_index = self.fold_tensor(sentence_index)
        sent_word_mask = self.fold_tensor(sent_word_mask)
        sentence_mask = self.fold_tensor(sentence_mask)
        seq_output = self.bert(input_ids=input_ids,
                               attention_mask=attention_mask,
                               token_type_ids=token_type_ids)[0]
        fb, sent_num, seq_len = sent_word_mask.size()
        # Expand per-sentence word indices so token states can be gathered
        # into a (fb, sent_num, seq_len, hidden) layout.
        sentence_index = sentence_index.unsqueeze(-1).expand(
            -1, -1, -1, self.config.hidden_size
        ).reshape(fb, sent_num * seq_len, self.config.hidden_size)
        cls_h = seq_output[:, :1]
        sent_word_hidden = seq_output.gather(dim=1, index=sentence_index).reshape(
            fb, sent_num, seq_len, -1)
        # NOTE(review): masks appear to be 1 at padding positions — the
        # ``1 - mask`` factor zeroes them out; confirm the convention.
        sent_word_hidden = sent_word_hidden * (1 - sent_word_mask.unsqueeze(-1))
        # Sentences 0-1 hold the question + option; the rest is the passage.
        q_op_word_hidden = sent_word_hidden[:, :2].reshape(fb, 1, 2 * seq_len, -1)
        q_op_word_mask = sent_word_mask[:, :2].reshape(fb, 1, 2 * seq_len)
        q_op_hidden_sent = self.query(cls_h, q_op_word_hidden, q_op_word_mask,
                                      aligned=False, residual=False).view(fb, seq_output.size(-1))
        # =====================================
        # Word-level summary: projected query vs projected keys.
        p_hidden_sent, _ = layers.sentence_sum(
            q=self.sen_sum_q(q_op_hidden_sent),
            kv=self.sen_sum_k(sent_word_hidden[:, 2:]),
            mask=sent_word_mask[:, 2:]
        )
        p_hidden_sent = p_hidden_sent * (1 - sentence_mask[:, 2:].unsqueeze(-1))
        # Document-level summary with its own projection pair.
        attended_h, _scores = layers.weighted_sum(
            q=self.doc_sum_q(q_op_hidden_sent),
            kv=self.doc_sum_k(p_hidden_sent),
            mask=sentence_mask[:, 2:]
        )
        cls_input = torch.cat([q_op_hidden_sent, attended_h], dim=-1)
        logits = self.classifier(self.dropout(self.pooler(cls_input))).view(batch, num_choice)
        outputs = (logits,)
        if labels is not None:
            loss = self.loss_fct(logits, labels)
            outputs = (loss,) + outputs
            _, pred = logits.max(dim=-1)
            acc = torch.sum(pred == labels) / (1.0 * batch)
            outputs = outputs + (acc,)
        return outputs
class IterBertModelForMCRC4(IterBertModel):
    """Multi-choice RC head combining separate q/k projections (as in MCRC3)
    with word dropout on the sentence-level attention keys while values
    remain the clean hidden states.
    """
    model_prefix = 'iter_bert_mcrc4'

    def __init__(self, config: IterBertPreTrainedConfig):
        super().__init__(config)
        # Independent query/key projections for each attention level.
        self.sen_sum_q = nn.Linear(config.hidden_size, config.hidden_size)
        self.sen_sum_k = nn.Linear(config.hidden_size, config.hidden_size)
        self.doc_sum_q = nn.Linear(config.hidden_size, config.hidden_size)
        self.doc_sum_k = nn.Linear(config.hidden_size, config.hidden_size)
        if config.word_dropout > 0:
            self.word_dropout = nn.Dropout(config.word_dropout)
        else:
            # Identity when word-level dropout is disabled.
            self.word_dropout = lambda x: x
        if config.cls_type == 1:
            self.pooler = nn.Sequential(
                nn.Linear(config.hidden_size * 2, config.hidden_size),
                nn.Tanh()
            )
        else:
            self.pooler = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.classifier = nn.Linear(config.hidden_size, 1)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.loss_fct = nn.CrossEntropyLoss(ignore_index=-1)
        self.init_weights()

    @staticmethod
    def fold_tensor(x):
        # Collapse (batch, num_choice, ...) -> (batch * num_choice, ...).
        if x is None:
            return None
        return x.reshape(x.size(0) * x.size(1), *x.size()[2:])

    def forward(self, input_ids, attention_mask=None, token_type_ids=None,
                sentence_index=None, sentence_mask=None, sent_word_mask=None,
                labels=None, **kwargs):
        """Score every choice; returns (logits,) or (loss, logits, acc)."""
        batch, num_choice, _ = input_ids.size()
        # Fold the choice dimension into the batch for one BERT pass.
        input_ids = self.fold_tensor(input_ids)
        token_type_ids = self.fold_tensor(token_type_ids)
        attention_mask = self.fold_tensor(attention_mask)
        sentence_index = self.fold_tensor(sentence_index)
        sent_word_mask = self.fold_tensor(sent_word_mask)
        sentence_mask = self.fold_tensor(sentence_mask)
        seq_output = self.bert(input_ids=input_ids,
                               attention_mask=attention_mask,
                               token_type_ids=token_type_ids)[0]
        fb, sent_num, seq_len = sent_word_mask.size()
        # Expand per-sentence word indices so token states can be gathered
        # into a (fb, sent_num, seq_len, hidden) layout.
        sentence_index = sentence_index.unsqueeze(-1).expand(
            -1, -1, -1, self.config.hidden_size
        ).reshape(fb, sent_num * seq_len, self.config.hidden_size)
        cls_h = seq_output[:, :1]
        sent_word_hidden = seq_output.gather(dim=1, index=sentence_index).reshape(
            fb, sent_num, seq_len, -1)
        # NOTE(review): masks appear to be 1 at padding positions — the
        # ``1 - mask`` factor zeroes them out; confirm the convention.
        sent_word_hidden = sent_word_hidden * (1 - sent_word_mask.unsqueeze(-1))
        # Sentences 0-1 hold the question + option; the rest is the passage.
        q_op_word_hidden = sent_word_hidden[:, :2].reshape(fb, 1, 2 * seq_len, -1)
        q_op_word_mask = sent_word_mask[:, :2].reshape(fb, 1, 2 * seq_len)
        q_op_hidden_sent = self.query(cls_h, q_op_word_hidden, q_op_word_mask,
                                      aligned=False, residual=False).view(fb, seq_output.size(-1))
        # =====================================
        # Keys are projected AND word-dropped; values stay clean.
        p_hidden_sent, _ = layers.sentence_sum(
            q=self.sen_sum_q(q_op_hidden_sent),
            kv=self.sen_sum_k(sent_word_hidden[:, 2:]),
            mask=sent_word_mask[:, 2:],
            v=sent_word_hidden[:, 2:],
            _dropout=self.word_dropout
        )
        p_hidden_sent = p_hidden_sent * (1 - sentence_mask[:, 2:].unsqueeze(-1))
        # Document-level summary: projected keys, clean sentence values.
        attended_h, _scores = layers.weighted_sum(
            q=self.doc_sum_q(q_op_hidden_sent),
            kv=self.doc_sum_k(p_hidden_sent),
            mask=sentence_mask[:, 2:],
            v=p_hidden_sent
        )
        cls_input = torch.cat([q_op_hidden_sent, attended_h], dim=-1)
        logits = self.classifier(self.dropout(self.pooler(cls_input))).view(batch, num_choice)
        outputs = (logits,)
        if labels is not None:
            loss = self.loss_fct(logits, labels)
            outputs = (loss,) + outputs
            _, pred = logits.max(dim=-1)
            acc = torch.sum(pred == labels) / (1.0 * batch)
            outputs = outputs + (acc,)
        return outputs
class IterBertModelForSequenceClassification(IterBertModel, PredictionMixin):
    """Sequence-classification head over IterBertModel.

    Single sequence per example (no choice folding).  A shared query
    projection attends over passage sentence words and then over sentence
    vectors; the pooled concatenation feeds a ``num_labels`` classifier.
    During eval, sentence attention scores and the gathered word ids are
    stashed via the PredictionMixin for later inspection.
    """
    model_prefix = 'iter_bert_sc'

    def __init__(self, config: IterBertPreTrainedConfig):
        super().__init__(config)
        # Shared query projection for both attention levels.
        self.sent_sum = nn.Linear(config.hidden_size, config.hidden_size)
        if config.cls_type == 1:
            self.pooler = nn.Sequential(
                nn.Linear(config.hidden_size * 2, config.hidden_size),
                nn.Tanh()
            )
        else:
            self.pooler = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.classifier = nn.Linear(config.hidden_size, config.num_labels)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.loss_fct = nn.CrossEntropyLoss(ignore_index=-1)
        self.init_weights()

    def forward(self, input_ids, attention_mask=None, token_type_ids=None,
                sentence_index=None, sentence_mask=None, sent_word_mask=None,
                labels=None, **kwargs):
        """Return (logits,) or (loss, logits, acc) when labels are given."""
        seq_output = self.bert(input_ids=input_ids,
                               attention_mask=attention_mask,
                               token_type_ids=token_type_ids)[0]
        batch, sent_num, seq_len = sent_word_mask.size()
        # Expand per-sentence word indices so token states can be gathered
        # into a (batch, sent_num, seq_len, hidden) layout.
        sentence_index = sentence_index.unsqueeze(-1).expand(
            -1, -1, -1, self.config.hidden_size
        ).reshape(batch, sent_num * seq_len, self.config.hidden_size)
        cls_h = seq_output[:, :1]
        sent_word_hidden = seq_output.gather(dim=1, index=sentence_index).reshape(
            batch, sent_num, seq_len, -1)
        # NOTE(review): masks appear to be 1 at padding positions — the
        # ``1 - mask`` factor zeroes them out; confirm the convention.
        sent_word_hidden = sent_word_hidden * (1 - sent_word_mask.unsqueeze(-1))
        # Sentences 0-1 form the "query" segment; the rest is the passage.
        q_op_word_hidden = sent_word_hidden[:, :2].reshape(batch, 1, 2 * seq_len, -1)
        q_op_word_mask = sent_word_mask[:, :2].reshape(batch, 1, 2 * seq_len)
        q_op_hidden_sent = self.query(cls_h, q_op_word_hidden, q_op_word_mask,
                                      aligned=False, residual=False).view(batch, seq_output.size(-1))
        # =====================================
        q_op_query = self.sent_sum(q_op_hidden_sent)
        # Word-level then sentence-level summaries with the same query.
        p_hidden_sent, _ = layers.sentence_sum(q_op_query, sent_word_hidden[:, 2:], sent_word_mask[:, 2:])
        p_hidden_sent = p_hidden_sent * (1 - sentence_mask[:, 2:].unsqueeze(-1))
        attended_h, _scores = layers.weighted_sum(q_op_query, p_hidden_sent, sentence_mask[:, 2:])
        cls_input = torch.cat([q_op_hidden_sent, attended_h], dim=-1)
        logits = self.classifier(self.dropout(self.pooler(cls_input)))
        outputs = (logits,)
        if labels is not None:
            loss = self.loss_fct(logits, labels)
            outputs = (loss,) + outputs
            _, pred = logits.max(dim=-1)
            acc = torch.sum(pred == labels) / (1.0 * batch)
            outputs = outputs + (acc,)
        # prediction utils
        if not self.training:
            # ``sentence_index[:, :, 0]`` recovers the raw token indices
            # (hidden dim was broadcast), used to map ids back to words.
            self.concat_predict_tensors(sentence_logits=_scores,
                                        sent_word_ids=input_ids.gather(dim=1, index=sentence_index[:, :, 0]).reshape(
                                            batch, sent_num, seq_len))
        return outputs
class IterBertModelForSequenceClassificationV2(IterBertModel, PredictionMixin):
    """Sequence-classification head with separate query projections for the
    sentence-level (``sent_sum``) and document-level (``doc_sum``) attention
    summaries.
    """
    model_prefix = 'iter_bert_sc_v2'

    def __init__(self, config: IterBertPreTrainedConfig):
        super().__init__(config)
        # Separate query projections per attention level.
        self.sent_sum = nn.Linear(config.hidden_size, config.hidden_size)
        self.doc_sum = nn.Linear(config.hidden_size, config.hidden_size)
        if config.cls_type == 1:
            self.pooler = nn.Sequential(
                nn.Linear(config.hidden_size * 2, config.hidden_size),
                nn.Tanh()
            )
        else:
            self.pooler = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.classifier = nn.Linear(config.hidden_size, config.num_labels)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.loss_fct = nn.CrossEntropyLoss(ignore_index=-1)
        self.init_weights()

    def forward(self, input_ids, attention_mask=None, token_type_ids=None,
                sentence_index=None, sentence_mask=None, sent_word_mask=None,
                labels=None, **kwargs):
        """Return (logits,) or (loss, logits, acc) when labels are given."""
        seq_output = self.bert(input_ids=input_ids,
                               attention_mask=attention_mask,
                               token_type_ids=token_type_ids)[0]
        batch, sent_num, seq_len = sent_word_mask.size()
        # Expand per-sentence word indices so token states can be gathered
        # into a (batch, sent_num, seq_len, hidden) layout.
        sentence_index = sentence_index.unsqueeze(-1).expand(
            -1, -1, -1, self.config.hidden_size
        ).reshape(batch, sent_num * seq_len, self.config.hidden_size)
        cls_h = seq_output[:, :1]
        sent_word_hidden = seq_output.gather(dim=1, index=sentence_index).reshape(
            batch, sent_num, seq_len, -1)
        # NOTE(review): masks appear to be 1 at padding positions — the
        # ``1 - mask`` factor zeroes them out; confirm the convention.
        sent_word_hidden = sent_word_hidden * (1 - sent_word_mask.unsqueeze(-1))
        # Sentences 0-1 form the "query" segment; the rest is the passage.
        q_op_word_hidden = sent_word_hidden[:, :2].reshape(batch, 1, 2 * seq_len, -1)
        q_op_word_mask = sent_word_mask[:, :2].reshape(batch, 1, 2 * seq_len)
        q_op_hidden_sent = self.query(cls_h, q_op_word_hidden, q_op_word_mask,
                                      aligned=False, residual=False).view(batch, seq_output.size(-1))
        # =====================================
        # Word-level summary with the sentence-level query projection.
        q_op_query1 = self.sent_sum(q_op_hidden_sent)
        p_hidden_sent, _ = layers.sentence_sum(q_op_query1, sent_word_hidden[:, 2:], sent_word_mask[:, 2:])
        p_hidden_sent = p_hidden_sent * (1 - sentence_mask[:, 2:].unsqueeze(-1))
        # Document-level summary with its own query projection.
        q_op_query2 = self.doc_sum(q_op_hidden_sent)
        attended_h, _scores = layers.weighted_sum(q_op_query2, p_hidden_sent, sentence_mask[:, 2:])
        cls_input = torch.cat([q_op_hidden_sent, attended_h], dim=-1)
        logits = self.classifier(self.dropout(self.pooler(cls_input)))
        outputs = (logits,)
        if labels is not None:
            loss = self.loss_fct(logits, labels)
            outputs = (loss,) + outputs
            _, pred = logits.max(dim=-1)
            acc = torch.sum(pred == labels) / (1.0 * batch)
            outputs = outputs + (acc,)
        # prediction utils
        if not self.training:
            # ``sentence_index[:, :, 0]`` recovers the raw token indices
            # (hidden dim was broadcast), used to map ids back to words.
            self.concat_predict_tensors(sentence_logits=_scores,
                                        sent_word_ids=input_ids.gather(dim=1, index=sentence_index[:, :, 0]).reshape(
                                            batch, sent_num, seq_len))
        return outputs
class IterBertModelForSequenceClassificationV3(IterBertModel, PredictionMixin):
    """Sequence-classification head with separate query/key projections at
    both the sentence level (``sen_sum_q``/``sen_sum_k``) and the document
    level (``doc_sum_q``/``doc_sum_k``).  Prediction tensors are cast to
    float/int before being stashed, unlike V1/V2.
    """
    model_prefix = 'iter_bert_sc_v3'

    def __init__(self, config: IterBertPreTrainedConfig):
        super().__init__(config)
        # Independent query/key projections for each attention level.
        self.sen_sum_q = nn.Linear(config.hidden_size, config.hidden_size)
        self.sen_sum_k = nn.Linear(config.hidden_size, config.hidden_size)
        self.doc_sum_q = nn.Linear(config.hidden_size, config.hidden_size)
        self.doc_sum_k = nn.Linear(config.hidden_size, config.hidden_size)
        if config.cls_type == 1:
            self.pooler = nn.Sequential(
                nn.Linear(config.hidden_size * 2, config.hidden_size),
                nn.Tanh()
            )
        else:
            self.pooler = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.classifier = nn.Linear(config.hidden_size, config.num_labels)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.loss_fct = nn.CrossEntropyLoss(ignore_index=-1)
        self.init_weights()

    def forward(self, input_ids, attention_mask=None, token_type_ids=None,
                sentence_index=None, sentence_mask=None, sent_word_mask=None,
                labels=None, **kwargs):
        """Return (logits,) or (loss, logits, acc) when labels are given."""
        seq_output = self.bert(input_ids=input_ids,
                               attention_mask=attention_mask,
                               token_type_ids=token_type_ids)[0]
        batch, sent_num, seq_len = sent_word_mask.size()
        # Expand per-sentence word indices so token states can be gathered
        # into a (batch, sent_num, seq_len, hidden) layout.
        sentence_index = sentence_index.unsqueeze(-1).expand(
            -1, -1, -1, self.config.hidden_size
        ).reshape(batch, sent_num * seq_len, self.config.hidden_size)
        cls_h = seq_output[:, :1]
        sent_word_hidden = seq_output.gather(dim=1, index=sentence_index).reshape(
            batch, sent_num, seq_len, -1)
        # NOTE(review): masks appear to be 1 at padding positions — the
        # ``1 - mask`` factor zeroes them out; confirm the convention.
        sent_word_hidden = sent_word_hidden * (1 - sent_word_mask.unsqueeze(-1))
        # Sentences 0-1 form the "query" segment; the rest is the passage.
        q_op_word_hidden = sent_word_hidden[:, :2].reshape(batch, 1, 2 * seq_len, -1)
        q_op_word_mask = sent_word_mask[:, :2].reshape(batch, 1, 2 * seq_len)
        q_op_hidden_sent = self.query(cls_h, q_op_word_hidden, q_op_word_mask,
                                      aligned=False, residual=False).view(batch, seq_output.size(-1))
        # =====================================
        # Word-level summary: projected query vs projected keys.
        p_hidden_sent, _ = layers.sentence_sum(
            q=self.sen_sum_q(q_op_hidden_sent),
            kv=self.sen_sum_k(sent_word_hidden[:, 2:]),
            mask=sent_word_mask[:, 2:]
        )
        p_hidden_sent = p_hidden_sent * (1 - sentence_mask[:, 2:].unsqueeze(-1))
        # Document-level summary with its own projection pair.
        attended_h, _scores = layers.weighted_sum(
            q=self.doc_sum_q(q_op_hidden_sent),
            kv=self.doc_sum_k(p_hidden_sent),
            mask=sentence_mask[:, 2:]
        )
        cls_input = torch.cat([q_op_hidden_sent, attended_h], dim=-1)
        logits = self.classifier(self.dropout(self.pooler(cls_input)))
        outputs = (logits,)
        if labels is not None:
            loss = self.loss_fct(logits, labels)
            outputs = (loss,) + outputs
            _, pred = logits.max(dim=-1)
            acc = torch.sum(pred == labels) / (1.0 * batch)
            outputs = outputs + (acc,)
        # prediction utils
        if not self.training:
            # Cast to float/int (e.g. out of fp16) before stashing;
            # ``sentence_index[:, :, 0]`` recovers the raw token indices.
            self.concat_predict_tensors(sentence_logits=_scores.float(),
                                        sent_word_ids=input_ids.gather(dim=1, index=sentence_index[:, :, 0]).reshape(
                                            batch, sent_num, seq_len).int())
        return outputs
class IterBertModelForSequenceClassificationV4(IterBertModel, PredictionMixin):
    """Sentence-structured sequence classifier, variant "v4".

    Same pipeline as the preceding variant, except that an explicit,
    un-projected value tensor (``v=``) is passed to ``layers.sentence_sum``
    and ``layers.weighted_sum`` (keys are projected, values are not), and
    the prediction tensors are recorded without dtype casts.
    """
    model_prefix = 'iter_bert_sc_v4'

    def __init__(self, config: IterBertPreTrainedConfig):
        """Set up attention projections, pooler, classifier, dropout and loss."""
        super().__init__(config)
        # Bias-free query/key projections for sentence- and document-level attention.
        self.sen_sum_q = nn.Linear(config.hidden_size, config.hidden_size, bias=False)
        self.sen_sum_k = nn.Linear(config.hidden_size, config.hidden_size, bias=False)
        self.doc_sum_q = nn.Linear(config.hidden_size, config.hidden_size, bias=False)
        self.doc_sum_k = nn.Linear(config.hidden_size, config.hidden_size, bias=False)
        # cls_type == 1 selects a tanh-activated pooler over the 2*hidden concat.
        if config.cls_type == 1:
            self.pooler = nn.Sequential(
                nn.Linear(config.hidden_size * 2, config.hidden_size),
                nn.Tanh()
            )
        else:
            self.pooler = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.classifier = nn.Linear(config.hidden_size, config.num_labels)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.loss_fct = nn.CrossEntropyLoss(ignore_index=-1)
        self.init_weights()

    def forward(self, input_ids, attention_mask=None, token_type_ids=None,
                sentence_index=None, sentence_mask=None, sent_word_mask=None,
                labels=None, **kwargs):
        """Encode with BERT, pool question/option + passage sentences, classify.

        Returns ``(logits,)`` or ``(loss, logits, acc)`` when ``labels`` is
        given (label value -1 is ignored by the loss).
        """
        seq_output = self.bert(input_ids=input_ids,
                               attention_mask=attention_mask,
                               token_type_ids=token_type_ids)[0]
        batch, sent_num, seq_len = sent_word_mask.size()
        # Expand the gather index over the hidden dimension for Tensor.gather.
        sentence_index = sentence_index.unsqueeze(-1).expand(
            -1, -1, -1, self.config.hidden_size
        ).reshape(batch, sent_num * seq_len, self.config.hidden_size)
        cls_h = seq_output[:, :1]  # [CLS] hidden state as a length-1 query.
        # Token hidden states re-grouped per sentence: (batch, sent_num, seq_len, hidden).
        sent_word_hidden = seq_output.gather(dim=1, index=sentence_index).reshape(
            batch, sent_num, seq_len, -1)
        # Zero masked tokens (mask value 1 appears to mark padding — TODO confirm).
        sent_word_hidden = sent_word_hidden * (1 - sent_word_mask.unsqueeze(-1))
        # Sentences 0-1 merged into one span — presumably question + option.
        q_op_word_hidden = sent_word_hidden[:, :2].reshape(batch, 1, 2 * seq_len, -1)
        q_op_word_mask = sent_word_mask[:, :2].reshape(batch, 1, 2 * seq_len)
        # Attend from [CLS] over the merged span to build the query vector.
        q_op_hidden_sent = self.query(cls_h, q_op_word_hidden, q_op_word_mask,
                                      aligned=False, residual=False).view(batch, seq_output.size(-1))
        # =====================================
        # Per-sentence summary of passage sentences; the raw (un-projected)
        # hidden states are passed as the values here.
        p_hidden_sent, _ = layers.sentence_sum(
            q=self.sen_sum_q(q_op_hidden_sent),
            kv=self.sen_sum_k(sent_word_hidden[:, 2:]),
            v=sent_word_hidden[:, 2:],
            mask=sent_word_mask[:, 2:]
        )
        # Zero out summaries of masked sentences.
        p_hidden_sent = p_hidden_sent * (1 - sentence_mask[:, 2:].unsqueeze(-1))
        # Document-level attention over sentence summaries (values un-projected).
        attended_h, _scores = layers.weighted_sum(
            q=self.doc_sum_q(q_op_hidden_sent),
            kv=self.doc_sum_k(p_hidden_sent),
            v=p_hidden_sent,
            mask=sentence_mask[:, 2:]
        )
        # Classify on [query ; attended document] concatenation.
        cls_input = torch.cat([q_op_hidden_sent, attended_h], dim=-1)
        logits = self.classifier(self.dropout(self.pooler(cls_input)))
        outputs = (logits,)
        if labels is not None:
            loss = self.loss_fct(logits, labels)
            outputs = (loss,) + outputs
            _, pred = logits.max(dim=-1)
            acc = torch.sum(pred == labels) / (1.0 * batch)
            outputs = outputs + (acc,)
        # prediction utils
        if not self.training:
            # Record attention scores and per-sentence token ids for inspection.
            self.concat_predict_tensors(sentence_logits=_scores,
                                        sent_word_ids=input_ids.gather(dim=1, index=sentence_index[:, :, 0]).reshape(
                                            batch, sent_num, seq_len))
        return outputs
class BertForMultipleChoice(BertPreTrainedModel):
    """Vanilla BERT multiple-choice head.

    Each choice's sequence is encoded independently; a one-unit linear head
    over the pooled [CLS] representation scores it, and cross-entropy is
    taken across the choices of each example.
    """
    model_prefix = 'bert_mcrc'
    config_class = IterBertPreTrainedConfig

    def __init__(self, config: IterBertPreTrainedConfig):
        """Build the encoder, pooler, scoring head, dropout and loss."""
        super().__init__(config)
        self.bert = BertModel(config)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        # cls_type == 1 selects a tanh-activated pooler, otherwise plain linear.
        if config.cls_type == 1:
            self.pooler = nn.Sequential(
                nn.Linear(config.hidden_size, config.hidden_size),
                nn.Tanh()
            )
        else:
            self.pooler = nn.Linear(config.hidden_size, config.hidden_size)
        self.classifier = nn.Linear(config.hidden_size, 1)
        self.loss_fct = nn.CrossEntropyLoss(ignore_index=-1)
        self.init_weights()

    def forward(self, input_ids, attention_mask=None, token_type_ids=None, labels=None, **kwargs):
        """Score ``num_choices`` candidates per example.

        Returns ``(logits,)`` or ``(loss, logits, acc)`` when ``labels`` is
        provided (label value -1 is ignored by the loss).
        """
        batch, num_choices = input_ids.size()[:2]

        def _flatten(tensor):
            # Collapse (batch, num_choices, seq_len) -> (batch * num_choices, seq_len).
            return None if tensor is None else tensor.view(-1, tensor.size(-1))

        seq_output = self.bert(
            _flatten(input_ids),
            attention_mask=_flatten(attention_mask),
            token_type_ids=_flatten(token_type_ids)
        )[0]
        # One scalar score per flattened sequence, then regroup by example.
        choice_scores = self.classifier(self.dropout(self.pooler(seq_output[:, 0])))
        logits = choice_scores.view(-1, num_choices)
        if labels is None:
            return (logits,)
        loss = self.loss_fct(logits, labels)
        pred = logits.argmax(dim=-1)
        acc = torch.sum(pred == labels) / (1.0 * batch)
        return (loss, logits, acc)
class BertForSequenceClassification(BertPreTrainedModel):
    """Standard BERT sequence classifier: linear head over the pooled [CLS]
    representation of a single sequence per example."""
    model_prefix = 'bert_sc'
    config_class = IterBertPreTrainedConfig

    def __init__(self, config: IterBertPreTrainedConfig):
        """Build the encoder, pooler (tanh-activated when ``config.cls_type == 1``),
        dropout and a ``num_labels``-way classifier."""
        super().__init__(config)
        self.bert = BertModel(config)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        if config.cls_type == 1:
            self.pooler = nn.Sequential(
                nn.Linear(config.hidden_size, config.hidden_size),
                nn.Tanh()
            )
        else:
            self.pooler = nn.Linear(config.hidden_size, config.hidden_size)
        self.classifier = nn.Linear(config.hidden_size, config.num_labels)
        self.loss_fct = nn.CrossEntropyLoss(ignore_index=-1)
        self.init_weights()

    def forward(self, input_ids, attention_mask=None, token_type_ids=None, labels=None, **kwargs):
        """Classify each input sequence.

        Returns ``(logits,)`` or ``(loss, logits, acc)`` when ``labels`` is
        given; label value -1 is ignored by the loss.
        """
        # Fix: the original unpacked ``batch, num_choices = input_ids.size()[:2]``
        # (copied from the multiple-choice model); for single-sequence input the
        # second value is the sequence length and was never used. The dead
        # commented-out flattening lines from that model are also removed.
        batch = input_ids.size(0)
        seq_output = self.bert(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids
        )[0]
        logits = self.classifier(self.dropout(self.pooler(seq_output[:, 0])))
        outputs = (logits,)
        if labels is not None:
            loss = self.loss_fct(logits, labels)
            outputs = (loss,) + outputs
            _, pred = logits.max(dim=-1)
            acc = torch.sum(pred == labels) / (1.0 * batch)
            outputs = outputs + (acc,)
        return outputs
class IterBertForQuestionAnswering(IterBertModel):
    """Extractive QA head: a question-conditioned bilinear span scorer."""
    model_prefix = 'iter_bert_span'

    def __init__(self, config):
        """Create the projection producing ``num_labels`` bilinear weight
        vectors of size ``hidden_size`` each (num_labels is expected to be 2
        for start/end — TODO confirm against the configs in use)."""
        super().__init__(config)
        self.num_labels = config.num_labels
        self.qa_outputs = nn.Linear(config.hidden_size, config.num_labels * config.hidden_size)
        self.init_weights()

    def forward(
        self,
        input_ids=None,
        attention_mask=None,
        token_type_ids=None,
        position_ids=None,
        head_mask=None,
        inputs_embeds=None,
        start_positions=None,
        end_positions=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
    ):
        """Compute start/end logits; averaged CE loss when positions are given.

        Returns a tuple ``(start_logits, end_logits, ...)`` with the loss
        prepended when available, or a ``QuestionAnsweringModelOutput`` when
        ``return_dict`` is truthy.
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        batch, seq_len = input_ids.size()
        # `token_type_ids`: [0,0,0,1,1,1,1,0,0,0]
        # `attention_mask`: [1,1,1,1,1,1,1,0,0,0]
        # `1` for true token and `0` for mask
        # Question tokens: segment 0 AND attended; passage: segment 1 AND attended.
        question_mask = (1 - token_type_ids) * attention_mask
        passage_mask = token_type_ids * attention_mask
        outputs = self.bert(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        sequence_output = outputs[0]
        d = sequence_output.size(-1)
        cls_h = sequence_output[:, :1]  # [CLS] hidden state as a length-1 query.
        question_mask = question_mask.to(sequence_output.dtype)
        # NOTE(review): passage_mask (and this cast) is computed but never
        # used below — possibly leftover from an earlier variant.
        passage_mask = passage_mask.to(sequence_output.dtype)
        attention_mask = attention_mask.to(sequence_output.dtype)
        # Pool a question representation by attending from [CLS] over question
        # tokens; ``1 - question_mask`` presumably flips to a padding mask for
        # self.query — verify against the helper's convention.
        q_hidden = self.query(cls_h, sequence_output.unsqueeze(1), 1 - question_mask.unsqueeze(1),
                              aligned=True, residual=False).view(batch, d)
        # Question-specific bilinear weights: (batch, num_labels, d).
        bilinear_q = self.qa_outputs(q_hidden).view(batch, self.num_labels, d)
        # [batch, 2, d], [batch, seq_len, d] -> [batch, seq_len, 2]
        logits = torch.einsum("bih,bjh->bji", bilinear_q, sequence_output)
        start_logits, end_logits = logits.split(1, dim=-1)
        start_logits = start_logits.squeeze(-1)
        end_logits = end_logits.squeeze(-1)
        total_loss = None
        if start_positions is not None and end_positions is not None:
            # If we are on multi-GPU, split add a dimension
            if len(start_positions.size()) > 1:
                start_positions = start_positions.squeeze(-1)
            if len(end_positions.size()) > 1:
                end_positions = end_positions.squeeze(-1)
            # sometimes the start/end positions are outside our model inputs, we ignore these terms
            # Note: in-place clamp; out-of-range targets collapse onto the
            # ignored_index bucket, which the loss then skips.
            ignored_index = start_logits.size(1)
            start_positions.clamp_(0, ignored_index)
            end_positions.clamp_(0, ignored_index)
            loss_fct = nn.CrossEntropyLoss(ignore_index=ignored_index)
            start_loss = loss_fct(start_logits, start_positions)
            end_loss = loss_fct(end_logits, end_positions)
            total_loss = (start_loss + end_loss) / 2
        if not return_dict:
            output = (start_logits, end_logits) + outputs[2:]
            return ((total_loss,) + output) if total_loss is not None else output
        return QuestionAnsweringModelOutput(
            loss=total_loss,
            start_logits=start_logits,
            end_logits=end_logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )
# Registry mapping each architecture's `model_prefix` string to its class,
# used to look models up by name. Built by comprehension so a class can never
# be listed under the wrong key.
iter_bert_models_map = {
    _model_cls.model_prefix: _model_cls
    for _model_cls in (
        BertForMaskedLMBaseline,
        IterBertModelForBiSR,
        IterBertModelForBiSRAndMLM,
        IterBertModelForSRAndMLM,
        IterBertModelForSR,
        IterBertModelForMLM,
        IterBertModelForMCRC,
        IterBertModelForMCRCDropout,
        IterBertModelForMCRC2,
        IterBertModelForMCRC3,
        IterBertModelForMCRC4,
        IterBertModelForSequenceClassification,
        IterBertModelForSequenceClassificationV2,
        IterBertModelForSequenceClassificationV3,
        IterBertModelForSequenceClassificationV4,
        BertForMultipleChoice,
        BertForSequenceClassification,
    )
}
| 39.087407
| 118
| 0.60449
| 8,493
| 67,973
| 4.481455
| 0.030731
| 0.052337
| 0.065159
| 0.038359
| 0.868474
| 0.85384
| 0.846615
| 0.834923
| 0.829616
| 0.827225
| 0
| 0.014167
| 0.289703
| 67,973
| 1,738
| 119
| 39.109896
| 0.774155
| 0.017345
| 0
| 0.79654
| 0
| 0
| 0.010982
| 0
| 0
| 0
| 0
| 0
| 0.000824
| 1
| 0.037068
| false
| 0.001647
| 0.004942
| 0
| 0.102142
| 0.004942
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
49ae3aa0c2e4b604cd6f9900630d429a2d04caf1
| 22,562
|
py
|
Python
|
python/tests/test_binning.py
|
nsmith-/aghast
|
b3479a0e14bcdca6736f53c528101136cccbf9c1
|
[
"BSD-3-Clause"
] | null | null | null |
python/tests/test_binning.py
|
nsmith-/aghast
|
b3479a0e14bcdca6736f53c528101136cccbf9c1
|
[
"BSD-3-Clause"
] | null | null | null |
python/tests/test_binning.py
|
nsmith-/aghast
|
b3479a0e14bcdca6736f53c528101136cccbf9c1
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# Copyright (c) 2019, IRIS-HEP
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
import numpy
from aghast import *
class Test(unittest.TestCase):
def runTest(self):
    # No-op placeholder test method; presumably present so this TestCase
    # can be constructed without naming a specific test — confirm usage.
    pass
def test_binning_IntegerBinning(self):
    """IntegerBinning: check CategoryBinning labels directly and after
    converting to the other binning types, under several overflow layouts."""
    # Plain binning: one integer category per value in [10, 20].
    h = Histogram([Axis(IntegerBinning(10, 20))], UnweightedCounts(InterpretedInlineBuffer.fromarray(numpy.arange(11))))
    assert h.axis[0].binning.toCategoryBinning().categories == ["10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20"]
    assert h.axis[0].binning.toRegularBinning().toCategoryBinning().categories == ["[9.5, 10.5)", "[10.5, 11.5)", "[11.5, 12.5)", "[12.5, 13.5)", "[13.5, 14.5)", "[14.5, 15.5)", "[15.5, 16.5)", "[16.5, 17.5)", "[17.5, 18.5)", "[18.5, 19.5)", "[19.5, 20.5)"]
    assert h.axis[0].binning.toEdgesBinning().toCategoryBinning().categories == ["[9.5, 10.5)", "[10.5, 11.5)", "[11.5, 12.5)", "[12.5, 13.5)", "[13.5, 14.5)", "[14.5, 15.5)", "[15.5, 16.5)", "[16.5, 17.5)", "[17.5, 18.5)", "[18.5, 19.5)", "[19.5, 20.5)"]
    assert h.axis[0].binning.toIrregularBinning().toCategoryBinning().categories == ["[9.5, 10.5)", "[10.5, 11.5)", "[11.5, 12.5)", "[12.5, 13.5)", "[13.5, 14.5)", "[14.5, 15.5)", "[15.5, 16.5)", "[16.5, 17.5)", "[17.5, 18.5)", "[18.5, 19.5)", "[19.5, 20.5)"]
    assert h.axis[0].binning.toSparseRegularBinning().toCategoryBinning().categories == ["[9.5, 10.5)", "[10.5, 11.5)", "[11.5, 12.5)", "[12.5, 13.5)", "[13.5, 14.5)", "[14.5, 15.5)", "[15.5, 16.5)", "[16.5, 17.5)", "[17.5, 18.5)", "[18.5, 19.5)", "[19.5, 20.5)"]
    # underflow=below2, overflow=below1: both flow bins precede the integers,
    # underflow first.
    h = Histogram([Axis(IntegerBinning(10, 20, loc_underflow=RealOverflow.below2, loc_overflow=RealOverflow.below1))], UnweightedCounts(InterpretedInlineBuffer.fromarray(numpy.arange(13))))
    assert h.axis[0].binning.toCategoryBinning().categories == ["(-inf, 9]", "[21, +inf)", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20"]
    assert h.axis[0].binning.toRegularBinning().toCategoryBinning().categories == ["[-inf, 9.5)", "[20.5, +inf]", "[9.5, 10.5)", "[10.5, 11.5)", "[11.5, 12.5)", "[12.5, 13.5)", "[13.5, 14.5)", "[14.5, 15.5)", "[15.5, 16.5)", "[16.5, 17.5)", "[17.5, 18.5)", "[18.5, 19.5)", "[19.5, 20.5)"]
    assert h.axis[0].binning.toEdgesBinning().toCategoryBinning().categories == ["[-inf, 9.5)", "[20.5, +inf]", "[9.5, 10.5)", "[10.5, 11.5)", "[11.5, 12.5)", "[12.5, 13.5)", "[13.5, 14.5)", "[14.5, 15.5)", "[15.5, 16.5)", "[16.5, 17.5)", "[17.5, 18.5)", "[18.5, 19.5)", "[19.5, 20.5)"]
    assert h.axis[0].binning.toIrregularBinning().toCategoryBinning().categories == ["[-inf, 9.5)", "[20.5, +inf]", "[9.5, 10.5)", "[10.5, 11.5)", "[11.5, 12.5)", "[12.5, 13.5)", "[13.5, 14.5)", "[14.5, 15.5)", "[15.5, 16.5)", "[16.5, 17.5)", "[17.5, 18.5)", "[18.5, 19.5)", "[19.5, 20.5)"]
    # underflow=below2, overflow=above1: underflow first, overflow last.
    h = Histogram([Axis(IntegerBinning(10, 20, loc_underflow=RealOverflow.below2, loc_overflow=RealOverflow.above1))], UnweightedCounts(InterpretedInlineBuffer.fromarray(numpy.arange(13))))
    assert h.axis[0].binning.toCategoryBinning().categories == ["(-inf, 9]", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "[21, +inf)"]
    assert h.axis[0].binning.toRegularBinning().toCategoryBinning().categories == ["[-inf, 9.5)", "[9.5, 10.5)", "[10.5, 11.5)", "[11.5, 12.5)", "[12.5, 13.5)", "[13.5, 14.5)", "[14.5, 15.5)", "[15.5, 16.5)", "[16.5, 17.5)", "[17.5, 18.5)", "[18.5, 19.5)", "[19.5, 20.5)", "[20.5, +inf]"]
    assert h.axis[0].binning.toEdgesBinning().toCategoryBinning().categories == ["[-inf, 9.5)", "[9.5, 10.5)", "[10.5, 11.5)", "[11.5, 12.5)", "[12.5, 13.5)", "[13.5, 14.5)", "[14.5, 15.5)", "[15.5, 16.5)", "[16.5, 17.5)", "[17.5, 18.5)", "[18.5, 19.5)", "[19.5, 20.5)", "[20.5, +inf]"]
    assert h.axis[0].binning.toIrregularBinning().toCategoryBinning().categories == ["[-inf, 9.5)", "[9.5, 10.5)", "[10.5, 11.5)", "[11.5, 12.5)", "[12.5, 13.5)", "[13.5, 14.5)", "[14.5, 15.5)", "[15.5, 16.5)", "[16.5, 17.5)", "[17.5, 18.5)", "[18.5, 19.5)", "[19.5, 20.5)", "[20.5, +inf]"]
    # underflow=above2, overflow=above1: both flow bins follow the integers,
    # overflow first.
    h = Histogram([Axis(IntegerBinning(10, 20, loc_underflow=RealOverflow.above2, loc_overflow=RealOverflow.above1))], UnweightedCounts(InterpretedInlineBuffer.fromarray(numpy.arange(13))))
    assert h.axis[0].binning.toCategoryBinning().categories == ["10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "[21, +inf)", "(-inf, 9]"]
    assert h.axis[0].binning.toRegularBinning().toCategoryBinning().categories == ["[9.5, 10.5)", "[10.5, 11.5)", "[11.5, 12.5)", "[12.5, 13.5)", "[13.5, 14.5)", "[14.5, 15.5)", "[15.5, 16.5)", "[16.5, 17.5)", "[17.5, 18.5)", "[18.5, 19.5)", "[19.5, 20.5)", "[20.5, +inf]", "[-inf, 9.5)"]
    assert h.axis[0].binning.toEdgesBinning().toCategoryBinning().categories == ["[9.5, 10.5)", "[10.5, 11.5)", "[11.5, 12.5)", "[12.5, 13.5)", "[13.5, 14.5)", "[14.5, 15.5)", "[15.5, 16.5)", "[16.5, 17.5)", "[17.5, 18.5)", "[18.5, 19.5)", "[19.5, 20.5)", "[20.5, +inf]", "[-inf, 9.5)"]
    assert h.axis[0].binning.toIrregularBinning().toCategoryBinning().categories == ["[9.5, 10.5)", "[10.5, 11.5)", "[11.5, 12.5)", "[12.5, 13.5)", "[13.5, 14.5)", "[14.5, 15.5)", "[15.5, 16.5)", "[16.5, 17.5)", "[17.5, 18.5)", "[18.5, 19.5)", "[19.5, 20.5)", "[20.5, +inf]", "[-inf, 9.5)"]
def test_binning_RegularBinning(self):
    """RegularBinning: check CategoryBinning labels directly and after
    conversion, for several intervals and overflow/nanflow layouts."""
    # 10 uniform bins over [0.1, 10.1).
    h = Histogram([Axis(RegularBinning(10, RealInterval(0.1, 10.1)))], UnweightedCounts(InterpretedInlineBuffer.fromarray(numpy.arange(10))))
    assert h.axis[0].binning.toCategoryBinning().categories == ["[0.1, 1.1)", "[1.1, 2.1)", "[2.1, 3.1)", "[3.1, 4.1)", "[4.1, 5.1)", "[5.1, 6.1)", "[6.1, 7.1)", "[7.1, 8.1)", "[8.1, 9.1)", "[9.1, 10.1)"]
    assert h.axis[0].binning.toEdgesBinning().toCategoryBinning().categories == ["[0.1, 1.1)", "[1.1, 2.1)", "[2.1, 3.1)", "[3.1, 4.1)", "[4.1, 5.1)", "[5.1, 6.1)", "[6.1, 7.1)", "[7.1, 8.1)", "[8.1, 9.1)", "[9.1, 10.1)"]
    assert h.axis[0].binning.toIrregularBinning().toCategoryBinning().categories == ["[0.1, 1.1)", "[1.1, 2.1)", "[2.1, 3.1)", "[3.1, 4.1)", "[4.1, 5.1)", "[5.1, 6.1)", "[6.1, 7.1)", "[7.1, 8.1)", "[8.1, 9.1)", "[9.1, 10.1)"]
    assert h.axis[0].binning.toSparseRegularBinning().toCategoryBinning().categories == ["[0.1, 1.1)", "[1.1, 2.1)", "[2.1, 3.1)", "[3.1, 4.1)", "[4.1, 5.1)", "[5.1, 6.1)", "[6.1, 7.1)", "[7.1, 8.1)", "[8.1, 9.1)", "[9.1, 10.1)"]
    # Interval straddling zero.
    h = Histogram([Axis(RegularBinning(10, RealInterval(-0.9, 9.1)))], UnweightedCounts(InterpretedInlineBuffer.fromarray(numpy.arange(10))))
    assert h.axis[0].binning.toCategoryBinning().categories == ["[-0.9, 0.1)", "[0.1, 1.1)", "[1.1, 2.1)", "[2.1, 3.1)", "[3.1, 4.1)", "[4.1, 5.1)", "[5.1, 6.1)", "[6.1, 7.1)", "[7.1, 8.1)", "[8.1, 9.1)"]
    assert h.axis[0].binning.toSparseRegularBinning().toCategoryBinning().categories == ["[-0.9, 0.1)", "[0.1, 1.1)", "[1.1, 2.1)", "[2.1, 3.1)", "[3.1, 4.1)", "[4.1, 5.1)", "[5.1, 6.1)", "[6.1, 7.1)", "[7.1, 8.1)", "[8.1, 9.1)"]
    # Symmetric integer-edged interval.
    h = Histogram([Axis(RegularBinning(10, RealInterval(-100, 100)))], UnweightedCounts(InterpretedInlineBuffer.fromarray(numpy.arange(10))))
    assert h.axis[0].binning.toCategoryBinning().categories == ["[-100, -80)", "[-80, -60)", "[-60, -40)", "[-40, -20)", "[-20, 0)", "[0, 20)", "[20, 40)", "[40, 60)", "[60, 80)", "[80, 100)"]
    assert h.axis[0].binning.toEdgesBinning().toCategoryBinning().categories == ["[-100, -80)", "[-80, -60)", "[-60, -40)", "[-40, -20)", "[-20, 0)", "[0, 20)", "[20, 40)", "[40, 60)", "[60, 80)", "[80, 100)"]
    assert h.axis[0].binning.toIrregularBinning().toCategoryBinning().categories == ["[-100, -80)", "[-80, -60)", "[-60, -40)", "[-40, -20)", "[-20, 0)", "[0, 20)", "[20, 40)", "[40, 60)", "[60, 80)", "[80, 100)"]
    assert h.axis[0].binning.toSparseRegularBinning().toCategoryBinning().categories == ["[-100, -80)", "[-80, -60)", "[-60, -40)", "[-40, -20)", "[-20, 0)", "[0, 20)", "[20, 40)", "[40, 60)", "[60, 80)", "[80, 100)"]
    # underflow=below2, overflow=below1: both flow bins before the regular bins.
    h = Histogram([Axis(RegularBinning(10, RealInterval(-100, 100), overflow=RealOverflow(loc_underflow=RealOverflow.below2, loc_overflow=RealOverflow.below1)))], UnweightedCounts(InterpretedInlineBuffer.fromarray(numpy.arange(12))))
    assert h.axis[0].binning.toCategoryBinning().categories == ["[-inf, -100)", "[100, +inf]", "[-100, -80)", "[-80, -60)", "[-60, -40)", "[-40, -20)", "[-20, 0)", "[0, 20)", "[20, 40)", "[40, 60)", "[60, 80)", "[80, 100)"]
    assert h.axis[0].binning.toEdgesBinning().toCategoryBinning().categories == ["[-inf, -100)", "[100, +inf]", "[-100, -80)", "[-80, -60)", "[-60, -40)", "[-40, -20)", "[-20, 0)", "[0, 20)", "[20, 40)", "[40, 60)", "[60, 80)", "[80, 100)"]
    assert h.axis[0].binning.toIrregularBinning().toCategoryBinning().categories == ["[-inf, -100)", "[100, +inf]", "[-100, -80)", "[-80, -60)", "[-60, -40)", "[-40, -20)", "[-20, 0)", "[0, 20)", "[20, 40)", "[40, 60)", "[60, 80)", "[80, 100)"]
    # underflow=below2, overflow=above1: underflow first, overflow last.
    h = Histogram([Axis(RegularBinning(10, RealInterval(-100, 100), overflow=RealOverflow(loc_underflow=RealOverflow.below2, loc_overflow=RealOverflow.above1)))], UnweightedCounts(InterpretedInlineBuffer.fromarray(numpy.arange(12))))
    assert h.axis[0].binning.toCategoryBinning().categories == ["[-inf, -100)", "[-100, -80)", "[-80, -60)", "[-60, -40)", "[-40, -20)", "[-20, 0)", "[0, 20)", "[20, 40)", "[40, 60)", "[60, 80)", "[80, 100)", "[100, +inf]"]
    assert h.axis[0].binning.toEdgesBinning().toCategoryBinning().categories == ["[-inf, -100)", "[-100, -80)", "[-80, -60)", "[-60, -40)", "[-40, -20)", "[-20, 0)", "[0, 20)", "[20, 40)", "[40, 60)", "[60, 80)", "[80, 100)", "[100, +inf]"]
    assert h.axis[0].binning.toIrregularBinning().toCategoryBinning().categories == ["[-inf, -100)", "[-100, -80)", "[-80, -60)", "[-60, -40)", "[-40, -20)", "[-20, 0)", "[0, 20)", "[20, 40)", "[40, 60)", "[60, 80)", "[80, 100)", "[100, +inf]"]
    # underflow=above2, overflow=above1: both flow bins after, overflow first.
    h = Histogram([Axis(RegularBinning(10, RealInterval(-100, 100), overflow=RealOverflow(loc_underflow=RealOverflow.above2, loc_overflow=RealOverflow.above1)))], UnweightedCounts(InterpretedInlineBuffer.fromarray(numpy.arange(12))))
    assert h.axis[0].binning.toCategoryBinning().categories == ["[-100, -80)", "[-80, -60)", "[-60, -40)", "[-40, -20)", "[-20, 0)", "[0, 20)", "[20, 40)", "[40, 60)", "[60, 80)", "[80, 100)", "[100, +inf]", "[-inf, -100)"]
    assert h.axis[0].binning.toEdgesBinning().toCategoryBinning().categories == ["[-100, -80)", "[-80, -60)", "[-60, -40)", "[-40, -20)", "[-20, 0)", "[0, 20)", "[20, 40)", "[40, 60)", "[60, 80)", "[80, 100)", "[100, +inf]", "[-inf, -100)"]
    assert h.axis[0].binning.toIrregularBinning().toCategoryBinning().categories == ["[-100, -80)", "[-80, -60)", "[-60, -40)", "[-40, -20)", "[-20, 0)", "[0, 20)", "[20, 40)", "[40, 60)", "[60, 80)", "[80, 100)", "[100, +inf]", "[-inf, -100)"]
    # With a nanflow bin that also absorbs +/-inf (minf/pinf mapped in_nanflow),
    # so the under/overflow intervals become open at infinity.
    h = Histogram([Axis(RegularBinning(10, RealInterval(-100, 100), overflow=RealOverflow(loc_underflow=RealOverflow.below1, loc_overflow=RealOverflow.above1, loc_nanflow=RealOverflow.above2, minf_mapping=RealOverflow.in_nanflow, pinf_mapping=RealOverflow.in_nanflow)))], UnweightedCounts(InterpretedInlineBuffer.fromarray(numpy.arange(13))))
    assert h.axis[0].binning.toCategoryBinning().categories == ["(-inf, -100)", "[-100, -80)", "[-80, -60)", "[-60, -40)", "[-40, -20)", "[-20, 0)", "[0, 20)", "[20, 40)", "[40, 60)", "[60, 80)", "[80, 100)", "[100, +inf)", "{-inf, +inf, nan}"]
    assert h.axis[0].binning.toEdgesBinning().toCategoryBinning().categories == ["(-inf, -100)", "[-100, -80)", "[-80, -60)", "[-60, -40)", "[-40, -20)", "[-20, 0)", "[0, 20)", "[20, 40)", "[40, 60)", "[60, 80)", "[80, 100)", "[100, +inf)", "{-inf, +inf, nan}"]
    assert h.axis[0].binning.toIrregularBinning().toCategoryBinning().categories == ["(-inf, -100)", "[-100, -80)", "[-80, -60)", "[-60, -40)", "[-40, -20)", "[-20, 0)", "[0, 20)", "[20, 40)", "[40, 60)", "[60, 80)", "[80, 100)", "[100, +inf)", "{-inf, +inf, nan}"]
def test_binning_EdgesBinning(self):
    """EdgesBinning: check CategoryBinning labels directly and via
    IrregularBinning, for each overflow/nanflow layout."""
    # Plain explicit edges: three half-open bins.
    h = Histogram([Axis(EdgesBinning([3, 4.5, 10, 20]))], UnweightedCounts(InterpretedInlineBuffer.fromarray(numpy.arange(5))))
    assert h.axis[0].binning.toCategoryBinning().categories == ["[3, 4.5)", "[4.5, 10)", "[10, 20)"]
    assert h.axis[0].binning.toIrregularBinning().toCategoryBinning().categories == ["[3, 4.5)", "[4.5, 10)", "[10, 20)"]
    # underflow=below2, overflow=below1: both flow bins before the edges bins.
    h = Histogram([Axis(EdgesBinning([3, 4.5, 10, 20], overflow=RealOverflow(loc_underflow=RealOverflow.below2, loc_overflow=RealOverflow.below1)))], UnweightedCounts(InterpretedInlineBuffer.fromarray(numpy.arange(5))))
    assert h.axis[0].binning.toCategoryBinning().categories == ["[-inf, 3)", "[20, +inf]", "[3, 4.5)", "[4.5, 10)", "[10, 20)"]
    assert h.axis[0].binning.toIrregularBinning().toCategoryBinning().categories == ["[-inf, 3)", "[20, +inf]", "[3, 4.5)", "[4.5, 10)", "[10, 20)"]
    # underflow=below2, overflow=above1: underflow first, overflow last.
    h = Histogram([Axis(EdgesBinning([3, 4.5, 10, 20], overflow=RealOverflow(loc_underflow=RealOverflow.below2, loc_overflow=RealOverflow.above1)))], UnweightedCounts(InterpretedInlineBuffer.fromarray(numpy.arange(5))))
    assert h.axis[0].binning.toCategoryBinning().categories == ["[-inf, 3)", "[3, 4.5)", "[4.5, 10)", "[10, 20)", "[20, +inf]"]
    assert h.axis[0].binning.toIrregularBinning().toCategoryBinning().categories == ["[-inf, 3)", "[3, 4.5)", "[4.5, 10)", "[10, 20)", "[20, +inf]"]
    # underflow=above2, overflow=above1: both flow bins after, overflow first.
    h = Histogram([Axis(EdgesBinning([3, 4.5, 10, 20], overflow=RealOverflow(loc_underflow=RealOverflow.above2, loc_overflow=RealOverflow.above1)))], UnweightedCounts(InterpretedInlineBuffer.fromarray(numpy.arange(5))))
    assert h.axis[0].binning.toCategoryBinning().categories == ["[3, 4.5)", "[4.5, 10)", "[10, 20)", "[20, +inf]", "[-inf, 3)"]
    assert h.axis[0].binning.toIrregularBinning().toCategoryBinning().categories == ["[3, 4.5)", "[4.5, 10)", "[10, 20)", "[20, +inf]", "[-inf, 3)"]
    # Nanflow absorbing +/-inf, so flow intervals are open at infinity.
    h = Histogram([Axis(EdgesBinning([3, 4.5, 10, 20], overflow=RealOverflow(loc_underflow=RealOverflow.below1, loc_overflow=RealOverflow.above1, loc_nanflow=RealOverflow.above2, minf_mapping=RealOverflow.in_nanflow, pinf_mapping=RealOverflow.in_nanflow)))], UnweightedCounts(InterpretedInlineBuffer.fromarray(numpy.arange(6))))
    assert h.axis[0].binning.toCategoryBinning().categories == ["(-inf, 3)", "[3, 4.5)", "[4.5, 10)", "[10, 20)", "[20, +inf)", "{-inf, +inf, nan}"]
    assert h.axis[0].binning.toIrregularBinning().toCategoryBinning().categories == ["(-inf, 3)", "[3, 4.5)", "[4.5, 10)", "[10, 20)", "[20, +inf)", "{-inf, +inf, nan}"]
def test_binning_IrregularBinning(self):
    """IrregularBinning: check CategoryBinning labels for each
    overflow/nanflow layout."""
    # Plain explicit intervals.
    h = Histogram([Axis(IrregularBinning([RealInterval(3, 4.5), RealInterval(4.5, 10), RealInterval(10, 20)]))], UnweightedCounts(InterpretedInlineBuffer.fromarray(numpy.arange(3))))
    assert h.axis[0].binning.toCategoryBinning().categories == ["[3, 4.5)", "[4.5, 10)", "[10, 20)"]
    # underflow=below2, overflow=below1: both flow bins before the intervals.
    h = Histogram([Axis(IrregularBinning([RealInterval(3, 4.5), RealInterval(4.5, 10), RealInterval(10, 20)], overflow=RealOverflow(loc_underflow=RealOverflow.below2, loc_overflow=RealOverflow.below1)))], UnweightedCounts(InterpretedInlineBuffer.fromarray(numpy.arange(5))))
    assert h.axis[0].binning.toCategoryBinning().categories == ["[-inf, 3)", "[20, +inf]", "[3, 4.5)", "[4.5, 10)", "[10, 20)"]
    # underflow=below2, overflow=above1: underflow first, overflow last.
    h = Histogram([Axis(IrregularBinning([RealInterval(3, 4.5), RealInterval(4.5, 10), RealInterval(10, 20)], overflow=RealOverflow(loc_underflow=RealOverflow.below2, loc_overflow=RealOverflow.above1)))], UnweightedCounts(InterpretedInlineBuffer.fromarray(numpy.arange(5))))
    assert h.axis[0].binning.toCategoryBinning().categories == ["[-inf, 3)", "[3, 4.5)", "[4.5, 10)", "[10, 20)", "[20, +inf]"]
    # underflow=above2, overflow=above1: both flow bins after, overflow first.
    h = Histogram([Axis(IrregularBinning([RealInterval(3, 4.5), RealInterval(4.5, 10), RealInterval(10, 20)], overflow=RealOverflow(loc_underflow=RealOverflow.above2, loc_overflow=RealOverflow.above1)))], UnweightedCounts(InterpretedInlineBuffer.fromarray(numpy.arange(5))))
    assert h.axis[0].binning.toCategoryBinning().categories == ["[3, 4.5)", "[4.5, 10)", "[10, 20)", "[20, +inf]", "[-inf, 3)"]
    # Nanflow absorbing +/-inf, so flow intervals are open at infinity.
    h = Histogram([Axis(IrregularBinning([RealInterval(3, 4.5), RealInterval(4.5, 10), RealInterval(10, 20)], overflow=RealOverflow(loc_underflow=RealOverflow.below1, loc_overflow=RealOverflow.above1, loc_nanflow=RealOverflow.above2, minf_mapping=RealOverflow.in_nanflow, pinf_mapping=RealOverflow.in_nanflow)))], UnweightedCounts(InterpretedInlineBuffer.fromarray(numpy.arange(6))))
    assert h.axis[0].binning.toCategoryBinning().categories == ["(-inf, 3)", "[3, 4.5)", "[4.5, 10)", "[10, 20)", "[20, +inf)", "{-inf, +inf, nan}"]
def test_binning_SparseRegularBinning(self):
    """Sparse regular bins label as [origin + width*k, origin + width*(k+1));
    overflow singletons ({-inf}, {+inf}, {nan}) follow their RealOverflow
    locations.  Where checked, converting through IrregularBinning must yield
    identical category labels."""
    def check(binning, ncounts, expected, also_irregular=False):
        hist = Histogram([Axis(binning)],
                         UnweightedCounts(InterpretedInlineBuffer.fromarray(numpy.arange(ncounts))))
        assert hist.axis[0].binning.toCategoryBinning().categories == expected
        if also_irregular:
            # Round-trip through IrregularBinning must not change the labels.
            assert hist.axis[0].binning.toIrregularBinning().toCategoryBinning().categories == expected

    bins = [-3, 6, 10, 11, 12]

    # Width 10, origin 0: plain decade-style labels.
    check(SparseRegularBinning(list(bins), 10, 0.0), 5,
          ["[-30, -20)", "[60, 70)", "[100, 110)", "[110, 120)", "[120, 130)"],
          also_irregular=True)
    # Non-zero origin shifts every edge by 0.1.
    check(SparseRegularBinning(list(bins), 10, 0.1), 5,
          ["[-29.9, -19.9)", "[60.1, 70.1)", "[100.1, 110.1)", "[110.1, 120.1)", "[120.1, 130.1)"],
          also_irregular=True)
    # Underflow (below2) and overflow (below1) both prepended.
    check(SparseRegularBinning(list(bins), 10, 0.0,
                               overflow=RealOverflow(loc_underflow=RealOverflow.below2,
                                                     loc_overflow=RealOverflow.below1)), 7,
          ["{-inf}", "{+inf}", "[-30, -20)", "[60, 70)", "[100, 110)", "[110, 120)", "[120, 130)"])
    # Underflow prepended, overflow appended.
    check(SparseRegularBinning(list(bins), 10, 0.0,
                               overflow=RealOverflow(loc_underflow=RealOverflow.below2,
                                                     loc_overflow=RealOverflow.above1)), 7,
          ["{-inf}", "[-30, -20)", "[60, 70)", "[100, 110)", "[110, 120)", "[120, 130)", "{+inf}"])
    # Both appended: above1 before above2.
    check(SparseRegularBinning(list(bins), 10, 0.0,
                               overflow=RealOverflow(loc_underflow=RealOverflow.above2,
                                                     loc_overflow=RealOverflow.above1)), 7,
          ["[-30, -20)", "[60, 70)", "[100, 110)", "[110, 120)", "[120, 130)", "{+inf}", "{-inf}"])
    # All three flows placed: underflow first, overflow then nanflow appended.
    check(SparseRegularBinning(list(bins), 10, 0.0,
                               overflow=RealOverflow(loc_underflow=RealOverflow.below2,
                                                     loc_overflow=RealOverflow.above1,
                                                     loc_nanflow=RealOverflow.above2)), 8,
          ["{-inf}", "[-30, -20)", "[60, 70)", "[100, 110)", "[110, 120)", "[120, 130)", "{+inf}", "{nan}"])
    # Nanflow alone, prepended.
    check(SparseRegularBinning(list(bins), 10, 0.0,
                               overflow=RealOverflow(loc_nanflow=RealOverflow.below1)), 6,
          ["{nan}", "[-30, -20)", "[60, 70)", "[100, 110)", "[110, 120)", "[120, 130)"],
          also_irregular=True)
    # +/-inf folded into the nanflow bin widens its label.
    check(SparseRegularBinning(list(bins), 10, 0.0,
                               overflow=RealOverflow(loc_nanflow=RealOverflow.below1,
                                                     minf_mapping=RealOverflow.in_nanflow,
                                                     pinf_mapping=RealOverflow.in_nanflow)), 6,
          ["{-inf, +inf, nan}", "[-30, -20)", "[60, 70)", "[100, 110)", "[110, 120)", "[120, 130)"],
          also_irregular=True)
    # nan mapped to "missing" drops nan from the flow label.
    check(SparseRegularBinning(list(bins), 10, 0.0,
                               overflow=RealOverflow(loc_nanflow=RealOverflow.below1,
                                                     minf_mapping=RealOverflow.in_nanflow,
                                                     pinf_mapping=RealOverflow.in_nanflow,
                                                     nan_mapping=RealOverflow.missing)), 6,
          ["{-inf, +inf}", "[-30, -20)", "[60, 70)", "[100, 110)", "[110, 120)", "[120, 130)"],
          also_irregular=True)
| 130.416185
| 387
| 0.596977
| 3,201
| 22,562
| 4.184005
| 0.064355
| 0.035541
| 0.05585
| 0.060927
| 0.903457
| 0.895468
| 0.893228
| 0.876652
| 0.866647
| 0.857687
| 0
| 0.149549
| 0.124812
| 22,562
| 172
| 388
| 131.174419
| 0.528715
| 0.066749
| 0
| 0.091743
| 0
| 0
| 0.280531
| 0
| 0
| 0
| 0
| 0
| 0.623853
| 1
| 0.055046
| false
| 0.009174
| 0.027523
| 0
| 0.091743
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b73fa850a4141cf3244508b7982d22d19a78b553
| 135
|
py
|
Python
|
dsb/bot.py
|
Firemoon777/dialog-sticker-bot
|
6a7b5670d573fd78c8639c06f22c66d2088ec568
|
[
"ECL-2.0",
"Apache-2.0"
] | 6
|
2021-12-28T14:03:15.000Z
|
2022-01-02T02:26:33.000Z
|
dsb/bot.py
|
Firemoon777/dialog-sticker-bot
|
6a7b5670d573fd78c8639c06f22c66d2088ec568
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-12-30T10:29:29.000Z
|
2021-12-30T10:29:29.000Z
|
dsb/bot.py
|
Firemoon777/dialog-sticker-bot
|
6a7b5670d573fd78c8639c06f22c66d2088ec568
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2022-02-05T12:17:45.000Z
|
2022-02-05T12:17:45.000Z
|
from telegram.error import BadRequest
from telegram.ext import Updater, ConversationHandler
class DialogStickerBot(Updater):
    """Bot entry-point class: a bare subclass of ``telegram.ext.Updater``.

    NOTE(review): adds no attributes or methods here — presumably it exists as
    a named hook for handlers/configuration registered elsewhere; confirm
    against the module that instantiates it.
    """
    pass
| 22.5
| 53
| 0.82963
| 15
| 135
| 7.466667
| 0.733333
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125926
| 135
| 6
| 54
| 22.5
| 0.949153
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
b743fa598bda6f21e45781c9a04d964ec12572fc
| 17,980
|
py
|
Python
|
tests/test_completer_data.py
|
amzyang/kafka-shell
|
1ffcfcf07d46a401d338b64b612e5b32cdffb257
|
[
"Apache-2.0"
] | 109
|
2019-03-20T02:05:55.000Z
|
2022-01-21T07:52:31.000Z
|
tests/test_completer_data.py
|
amzyang/kafka-shell
|
1ffcfcf07d46a401d338b64b612e5b32cdffb257
|
[
"Apache-2.0"
] | 20
|
2019-03-20T02:13:43.000Z
|
2020-09-19T04:10:46.000Z
|
tests/test_completer_data.py
|
amzyang/kafka-shell
|
1ffcfcf07d46a401d338b64b612e5b32cdffb257
|
[
"Apache-2.0"
] | 11
|
2019-04-26T09:28:29.000Z
|
2021-12-04T14:54:11.000Z
|
from __future__ import unicode_literals
# (typed prefix, expected completions) pairs for top-level command completion.
# NOTE(review): the expected lists look order-sensitive — confirm whether the
# consuming test compares as sequences or as sets.
command_test_data = [
    (
        "",
        ["version", "cluster-select", "cluster-describe", "exit", "clear", "kafka-acls", "kafka-avro-console-consumer",
         "kafka-avro-console-producer", "kafka-replica-verification", "kafka-preferred-replica-election",
         "kafka-broker-api-versions", "kafka-configs", "kafka-console-consumer", "kafka-console-producer", "kafka-reassign-partitions",
         "kafka-consumer-groups", "kafka-delete-records", "kafka-dump-log", "kafka-log-dirs", "kafka-topics",
         "kafka-verifiable-consumer", "kafka-verifiable-producer", "ksql", "zookeeper-shell"]
    ),
    (
        "kafka",
        ["kafka-acls", "kafka-avro-console-consumer", "kafka-avro-console-producer", "kafka-broker-api-versions",
         "kafka-configs", "kafka-console-consumer", "kafka-console-producer", "kafka-consumer-groups", "kafka-reassign-partitions",
         "kafka-delete-records", "kafka-dump-log", "kafka-log-dirs", "kafka-topics", "kafka-verifiable-consumer",
         "kafka-verifiable-producer", "kafka-replica-verification", "kafka-preferred-replica-election"]
    ),
    (
        "k",
        ["ksql", "kafka-acls", "kafka-avro-console-consumer", "kafka-avro-console-producer",
         "kafka-broker-api-versions", "kafka-replica-verification", "kafka-preferred-replica-election",
         "kafka-configs", "kafka-console-consumer", "kafka-console-producer", "kafka-consumer-groups", "kafka-reassign-partitions",
         "kafka-delete-records", "kafka-dump-log", "kafka-log-dirs", "kafka-topics", "kafka-verifiable-consumer",
         "kafka-verifiable-producer", "zookeeper-shell"]
    ),
    (
        "ksq",
        ["ksql"]
    ),
    (
        "zookeeper",
        ["zookeeper-shell"]
    ),
    (
        "kafka-topics",
        ["kafka-topics"]
    ),
    (
        "cluster-",
        ["cluster-select", "cluster-describe"]
    ),
    (
        # Unknown prefix must complete to nothing.
        "this-command-does-not-exist",
        []
    )
]
# (partial command line, expected flag/argument completions) pairs.
# Covers: flag listing after a command, removal of flags already supplied
# (e.g. "--group test" drops "--group"), prefix narrowing, and cluster-name
# completion for the cluster-* commands.
option_test_data = [
    (
        "kafka-topics ",
        ["--alter", "--config", "--create", "--delete", "--delete-config", "--describe", "--disable-rack-aware",
         "--exclude-internal", "--force", "--help", "--if-exists", "--if-not-exists", "--list", "--partitions",
         "--replica-assignment", "--replication-factor", "--topic", "--topics-with-overrides",
         "--unavailable-partitions", "--under-replicated-partitions", "--zookeeper"]
    ),
    (
        "kafka-configs ",
        ["--add-config", "--alter", "--bootstrap-server", "--command-config", "--delete-config", "--describe",
         "--entity-default", "--entity-name", "--entity-type", "--force", "--help", "--zookeeper"]
    ),
    (
        "kafka-console-consumer ",
        ["--bootstrap-server", "--consumer-property", "--consumer.config", "--enable-systest-events", "--formatter",
         "--from-beginning", "--group", "--isolation-level", "--key-deserializer", "--max-messages", "--offset",
         "--partition", "--property", "--skip-message-on-error", "--timeout-ms", "--topic", "--value-deserializer",
         "--whitelist"]
    ),
    (
        "kafka-console-consumer --group test ",
        ["--bootstrap-server", "--consumer-property", "--consumer.config", "--enable-systest-events", "--formatter",
         "--from-beginning", "--isolation-level", "--key-deserializer", "--max-messages", "--offset",
         "--partition", "--property", "--skip-message-on-error", "--timeout-ms", "--topic", "--value-deserializer",
         "--whitelist"]
    ),
    (
        "kafka-console-consumer --group test --consumer-property print.key=true ",
        ["--bootstrap-server", "--consumer-property", "--consumer.config", "--enable-systest-events", "--formatter",
         "--from-beginning", "--isolation-level", "--key-deserializer", "--max-messages", "--offset",
         "--partition", "--property", "--skip-message-on-error", "--timeout-ms", "--topic", "--value-deserializer",
         "--whitelist"]
    ),
    (
        "kafka-console-consumer --group test --consumer-property print.key=true --for",
        ["--formatter"]
    ),
    (
        "ksql ",
        ["--", "--config-file", "--help", "--output", "--query-row-limit", "--query-timeout"]
    ),
    (
        "zookeeper-shell ",
        []
    ),
    (
        "cluster-select l",
        ["local"]
    ),
    (
        "cluster-describe ",
        ["local"]
    )
]
option_value_test_data = [
(
"kafka-configs --add-config ",
["SCRAM-SHA-256", "SCRAM-SHA-512", "advertised.listeners", "background.threads", "cleanup.policy",
"compression.type", "consumer_byte_rate", "delete.retention.ms", "file.delete.delay.ms", "flush.messages",
"flush.ms", "follower.replication.throttled.rate", "follower.replication.throttled.replicas",
"index.interval.bytes", "leader.replication.throttled.rate", "leader.replication.throttled.replicas",
"listener.security.protocol.map", "listeners", "log.cleaner.backoff.ms", "log.cleaner.dedupe.buffer.size",
"log.cleaner.delete.retention.ms", "log.cleaner.io.buffer.load.factor", "log.cleaner.io.buffer.size",
"log.cleaner.io.max.bytes.per.second", "log.cleaner.min.cleanable.ratio", "log.cleaner.min.compaction.lag.ms",
"log.cleaner.threads", "log.cleanup.policy", "log.flush.interval.messages", "log.flush.interval.ms",
"log.index.interval.bytes", "log.index.size.max.bytes", "log.message.downconversion.enable",
"log.message.timestamp.difference.max.ms", "log.message.timestamp.type", "log.preallocate",
"log.retention.bytes", "log.retention.ms", "log.roll.jitter.ms", "log.roll.ms", "log.segment.bytes",
"log.segment.delete.delay.ms", "max.connections.per.ip", "max.connections.per.ip.overrides",
"max.message.bytes", "message.downconversion.enable", "message.format.version", "message.max.bytes",
"message.timestamp.difference.max.ms", "message.timestamp.type", "metric.reporters",
"min.cleanable.dirty.ratio", "min.compaction.lag.ms", "min.insync.replicas", "num.io.threads",
"num.network.threads", "num.recovery.threads.per.data.dir", "num.replica.fetchers", "preallocate",
"principal.builder.class", "producer_byte_rate", "request_percentage", "retention.bytes", "retention.ms",
"sasl.enabled.mechanisms", "sasl.jaas.config", "sasl.kerberos.kinit.cmd",
"sasl.kerberos.min.time.before.relogin", "sasl.kerberos.principal.to.local.rules",
"sasl.kerberos.service.name", "sasl.kerberos.ticket.renew.jitter", "sasl.kerberos.ticket.renew.window.factor",
"sasl.login.refresh.buffer.seconds", "sasl.login.refresh.min.period.seconds",
"sasl.login.refresh.window.factor", "sasl.login.refresh.window.jitter", "sasl.mechanism.inter.broker.protocol",
"segment.bytes", "segment.index.bytes", "segment.jitter.ms", "segment.ms", "ssl.cipher.suites",
"ssl.client.auth", "ssl.enabled.protocols", "ssl.endpoint.identification.algorithm", "ssl.key.password",
"ssl.keymanager.algorithm", "ssl.keystore.location", "ssl.keystore.password", "ssl.keystore.type",
"ssl.protocol", "ssl.provider", "ssl.secure.random.implementation", "ssl.trustmanager.algorithm",
"ssl.truststore.location", "ssl.truststore.password", "ssl.truststore.type", "unclean.leader.election.enable"]
),
(
"kafka-configs --entity-type ",
["broker", "client", "topic", "user"]
),
(
"kafka-configs --entity-type broker --add-config ",
["advertised.listeners", "background.threads", "compression.type", "follower.replication.throttled.rate",
"leader.replication.throttled.rate", "listener.security.protocol.map", "listeners", "log.cleaner.backoff.ms",
"log.cleaner.dedupe.buffer.size", "log.cleaner.delete.retention.ms", "log.cleaner.io.buffer.load.factor",
"log.cleaner.io.buffer.size", "log.cleaner.io.max.bytes.per.second", "log.cleaner.min.cleanable.ratio",
"log.cleaner.min.compaction.lag.ms", "log.cleaner.threads", "log.cleanup.policy",
"log.flush.interval.messages", "log.flush.interval.ms", "log.index.interval.bytes", "log.index.size.max.bytes",
"log.message.downconversion.enable", "log.message.timestamp.difference.max.ms", "log.message.timestamp.type",
"log.preallocate", "log.retention.bytes", "log.retention.ms", "log.roll.jitter.ms", "log.roll.ms",
"log.segment.bytes", "log.segment.delete.delay.ms", "max.connections.per.ip",
"max.connections.per.ip.overrides", "message.max.bytes", "metric.reporters", "min.insync.replicas",
"num.io.threads", "num.network.threads", "num.recovery.threads.per.data.dir", "num.replica.fetchers",
"principal.builder.class", "sasl.enabled.mechanisms", "sasl.jaas.config", "sasl.kerberos.kinit.cmd",
"sasl.kerberos.min.time.before.relogin", "sasl.kerberos.principal.to.local.rules",
"sasl.kerberos.service.name", "sasl.kerberos.ticket.renew.jitter", "sasl.kerberos.ticket.renew.window.factor",
"sasl.login.refresh.buffer.seconds", "sasl.login.refresh.min.period.seconds",
"sasl.login.refresh.window.factor", "sasl.login.refresh.window.jitter", "sasl.mechanism.inter.broker.protocol",
"ssl.cipher.suites", "ssl.client.auth", "ssl.enabled.protocols", "ssl.endpoint.identification.algorithm",
"ssl.key.password", "ssl.keymanager.algorithm", "ssl.keystore.location", "ssl.keystore.password",
"ssl.keystore.type", "ssl.protocol", "ssl.provider", "ssl.secure.random.implementation",
"ssl.trustmanager.algorithm", "ssl.truststore.location", "ssl.truststore.password", "ssl.truststore.type",
"unclean.leader.election.enable"]
),
(
"kafka-configs --entity-type broker --delete-config ",
["advertised.listeners", "background.threads", "compression.type", "follower.replication.throttled.rate",
"leader.replication.throttled.rate", "listener.security.protocol.map", "listeners", "log.cleaner.backoff.ms",
"log.cleaner.dedupe.buffer.size", "log.cleaner.delete.retention.ms", "log.cleaner.io.buffer.load.factor",
"log.cleaner.io.buffer.size", "log.cleaner.io.max.bytes.per.second", "log.cleaner.min.cleanable.ratio",
"log.cleaner.min.compaction.lag.ms", "log.cleaner.threads", "log.cleanup.policy",
"log.flush.interval.messages", "log.flush.interval.ms", "log.index.interval.bytes", "log.index.size.max.bytes",
"log.message.downconversion.enable", "log.message.timestamp.difference.max.ms", "log.message.timestamp.type",
"log.preallocate", "log.retention.bytes", "log.retention.ms", "log.roll.jitter.ms", "log.roll.ms",
"log.segment.bytes", "log.segment.delete.delay.ms", "max.connections.per.ip",
"max.connections.per.ip.overrides", "message.max.bytes", "metric.reporters", "min.insync.replicas",
"num.io.threads", "num.network.threads", "num.recovery.threads.per.data.dir", "num.replica.fetchers",
"principal.builder.class", "sasl.enabled.mechanisms", "sasl.jaas.config", "sasl.kerberos.kinit.cmd",
"sasl.kerberos.min.time.before.relogin", "sasl.kerberos.principal.to.local.rules",
"sasl.kerberos.service.name", "sasl.kerberos.ticket.renew.jitter", "sasl.kerberos.ticket.renew.window.factor",
"sasl.login.refresh.buffer.seconds", "sasl.login.refresh.min.period.seconds",
"sasl.login.refresh.window.factor", "sasl.login.refresh.window.jitter", "sasl.mechanism.inter.broker.protocol",
"ssl.cipher.suites", "ssl.client.auth", "ssl.enabled.protocols", "ssl.endpoint.identification.algorithm",
"ssl.key.password", "ssl.keymanager.algorithm", "ssl.keystore.location", "ssl.keystore.password",
"ssl.keystore.type", "ssl.protocol", "ssl.provider", "ssl.secure.random.implementation",
"ssl.trustmanager.algorithm", "ssl.truststore.location", "ssl.truststore.password", "ssl.truststore.type",
"unclean.leader.election.enable"]
),
(
"kafka-configs --entity-type topic --add-config ",
["cleanup.policy", "compression.type", "delete.retention.ms", "file.delete.delay.ms", "flush.messages",
"flush.ms", "follower.replication.throttled.replicas", "index.interval.bytes",
"leader.replication.throttled.replicas", "max.message.bytes", "message.downconversion.enable",
"message.format.version", "message.timestamp.difference.max.ms", "message.timestamp.type",
"min.cleanable.dirty.ratio", "min.compaction.lag.ms", "min.insync.replicas", "preallocate", "retention.bytes",
"retention.ms", "segment.bytes", "segment.index.bytes", "segment.jitter.ms", "segment.ms",
"unclean.leader.election.enable"]
),
(
"kafka-configs --entity-type user --add-config ",
["SCRAM-SHA-256", "SCRAM-SHA-512", "consumer_byte_rate", "producer_byte_rate", "request_percentage"]
),
(
"kafka-configs --entity-type client --add-config ",
["consumer_byte_rate", "producer_byte_rate", "request_percentage"]
),
(
"kafka-configs --entity-type client --delete-config ",
["consumer_byte_rate", "producer_byte_rate", "request_percentage"]
),
(
"kafka-topics --config ",
["cleanup.policy", "compression.type", "delete.retention.ms", "file.delete.delay.ms", "flush.messages",
"flush.ms", "follower.replication.throttled.replicas", "index.interval.bytes",
"leader.replication.throttled.replicas", "max.message.bytes", "message.downconversion.enable",
"message.format.version", "message.timestamp.difference.max.ms", "message.timestamp.type",
"min.cleanable.dirty.ratio", "min.compaction.lag.ms", "min.insync.replicas", "preallocate", "retention.bytes",
"retention.ms", "segment.bytes", "segment.index.bytes", "segment.jitter.ms", "segment.ms",
"unclean.leader.election.enable"]
),
(
"kafka-topics --delete-config ",
["cleanup.policy", "compression.type", "delete.retention.ms", "file.delete.delay.ms", "flush.messages",
"flush.ms", "follower.replication.throttled.replicas", "index.interval.bytes",
"leader.replication.throttled.replicas", "max.message.bytes", "message.downconversion.enable",
"message.format.version", "message.timestamp.difference.max.ms", "message.timestamp.type",
"min.cleanable.dirty.ratio", "min.compaction.lag.ms", "min.insync.replicas", "preallocate", "retention.bytes",
"retention.ms", "segment.bytes", "segment.index.bytes", "segment.jitter.ms", "segment.ms",
"unclean.leader.election.enable"]
),
(
"kafka-configs --add-config cleanup.polic",
["cleanup.policy", "log.cleanup.policy"]
),
(
"kafka-configs --add-config cleanup.policy=",
["compact", "delete"]
),
(
"kafka-configs --add-config log.cleanup.policy=",
["compact", "delete"]
),
(
"kafka-configs --add-config log.cleanup.policy=comp",
["compact"]
),
(
"kafka-configs --add-config ssl.protocol=",
[]
),
(
"kafka-configs --add-config log.message.timestamp.type=",
["CreateTime", "LogAppendTime"]
),
(
"kafka-configs --add-config log.message.timestamp.type=Create",
["CreateTime"]
),
(
"kafka-configs --add-config log.message.timestamp.type=asdf",
[]
),
(
"kafka-configs --add-config compression.type=",
["gzip", "lz4", "none", "snappy", "zstd"]
),
(
"kafka-configs --delete-config compression.type=",
["gzip", "lz4", "none", "snappy", "zstd"]
),
(
"kafka-configs --add-config compression.type=z",
["zstd", "gzip", "lz4"]
),
(
"kafka-topics --config compression.type=",
["gzip", "lz4", "none", "snappy", "zstd"]
),
(
"kafka-topics --config message.timestamp.type=",
["CreateTime", "LogAppendTime"]
),
(
"kafka-topics --delete-config message.timestamp.type=",
["CreateTime", "LogAppendTime"]
),
(
"ksql --output ",
["JSON", "TABULAR"]
),
(
"ksql --output JS",
["JSON"]
),
(
"kafka-console-producer --request-required-acks ",
["-1", "0", "1", "all"]
),
(
"kafka-avro-console-producer --request-required-acks ",
["-1", "0", "1", "all"]
),
(
"kafka-verifiable-producer --acks ",
["-1", "0", "1", "all"]
),
(
"kafka-verifiable-consumer --reset-policy ",
["earliest", "latest", "none"]
),
(
"kafka-acls --resource-pattern-type ",
["ANY", "LITERAL", "MATCH", "PREFIXED"]
),
(
"kafka-console-producer --compression-codec ",
["gzip", "lz4", "none", "snappy", "zstd"]
),
(
"kafka-configs --add-config unclean.leader.election.enable=",
["true", "false"]
),
(
"kafka-configs --delete-config unclean.leader.election.enable=fal",
["false"]
),
(
"kafka-topics --config unclean.leader.election.enable=",
["true", "false"]
),
(
"kafka-topics --delete-config unclean.leader.election.enable=",
["true", "false"]
),
(
"kafka-configs --add-config log.preallocate=",
["true", "false"]
),
(
"kafka-configs --delete-config log.preallocate=tr",
["true"]
),
(
"kafka-topics --config preallocate=",
["true", "false"]
),
(
"kafka-topics --delete-config preallocate=",
["true", "false"]
),
(
"kafka-configs --add-config log.message.downconversion.enable=",
["true", "false"]
),
(
"kafka-topics --config message.downconversion.enable=",
["true", "false"]
)
]
| 50.083565
| 135
| 0.617408
| 1,891
| 17,980
| 5.853517
| 0.132734
| 0.030355
| 0.020327
| 0.028458
| 0.869817
| 0.850574
| 0.822658
| 0.81543
| 0.774686
| 0.756618
| 0
| 0.001771
| 0.18337
| 17,980
| 358
| 136
| 50.223464
| 0.752094
| 0
| 0
| 0.476056
| 0
| 0
| 0.699666
| 0.37069
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.019718
| 0.002817
| 0
| 0.002817
| 0.005634
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3f9ebb7fc689b9f7cd84deae81cef752a8c2f2b6
| 25,817
|
py
|
Python
|
moztrap/model/library/migrations/0003_auto__add_field_suite_cc_version__add_field_suitecase_cc_version__add_.py
|
yifanjiang/moztrap
|
2130c7101b7596b19a2697ab5f1c745e93e7c95b
|
[
"BSD-2-Clause"
] | 1
|
2015-02-10T15:09:42.000Z
|
2015-02-10T15:09:42.000Z
|
moztrap/model/library/migrations/0003_auto__add_field_suite_cc_version__add_field_suitecase_cc_version__add_.py
|
yifanjiang/moztrap
|
2130c7101b7596b19a2697ab5f1c745e93e7c95b
|
[
"BSD-2-Clause"
] | null | null | null |
moztrap/model/library/migrations/0003_auto__add_field_suite_cc_version__add_field_suitecase_cc_version__add_.py
|
yifanjiang/moztrap
|
2130c7101b7596b19a2697ab5f1c745e93e7c95b
|
[
"BSD-2-Clause"
] | null | null | null |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
    # Schema migration forward step: add an IntegerField column named
    # 'cc_version' (default 0, default not kept) to each library table.
    for table in ('library_suite',
                  'library_suitecase',
                  'library_caseversion',
                  'library_caseattachment',
                  'library_case',
                  'library_casestep'):
        db.add_column(table, 'cc_version',
                      self.gf('django.db.models.fields.IntegerField')(default=0),
                      keep_default=False)
def backwards(self, orm):
# Deleting field 'Suite.cc_version'
db.delete_column('library_suite', 'cc_version')
# Deleting field 'SuiteCase.cc_version'
db.delete_column('library_suitecase', 'cc_version')
# Deleting field 'CaseVersion.cc_version'
db.delete_column('library_caseversion', 'cc_version')
# Deleting field 'CaseAttachment.cc_version'
db.delete_column('library_caseattachment', 'cc_version')
# Deleting field 'Case.cc_version'
db.delete_column('library_case', 'cc_version')
# Deleting field 'CaseStep.cc_version'
db.delete_column('library_casestep', 'cc_version')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'core.product': {
'Meta': {'ordering': "['name']", 'object_name': 'Product'},
'cc_version': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 190426)'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'deleted_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'has_team': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'modified_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 190624)'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'own_team': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'symmetrical': 'False', 'blank': 'True'})
},
'core.productversion': {
'Meta': {'ordering': "['product', 'order']", 'object_name': 'ProductVersion'},
'cc_version': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 185878)'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'deleted_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'productversion'", 'symmetrical': 'False', 'to': "orm['environments.Environment']"}),
'has_team': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latest': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'modified_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 186074)'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'own_team': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'symmetrical': 'False', 'blank': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'versions'", 'to': "orm['core.Product']"}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'core.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'", '_ormbases': ['auth.User'], 'proxy': 'True'}
},
'environments.category': {
'Meta': {'ordering': "['name']", 'object_name': 'Category'},
'cc_version': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 196774)'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'deleted_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'modified_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 196972)'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'environments.element': {
'Meta': {'ordering': "['name']", 'object_name': 'Element'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'elements'", 'to': "orm['environments.Category']"}),
'cc_version': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 189436)'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'deleted_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'modified_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 189627)'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'environments.environment': {
'Meta': {'object_name': 'Environment'},
'cc_version': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 200292)'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'deleted_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'elements': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'environments'", 'symmetrical': 'False', 'to': "orm['environments.Element']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'modified_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 200493)'}),
'profile': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'environments'", 'null': 'True', 'to': "orm['environments.Profile']"})
},
'environments.profile': {
'Meta': {'object_name': 'Profile'},
'cc_version': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 197684)'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'deleted_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'modified_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 197880)'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'library.case': {
'Meta': {'object_name': 'Case'},
'cc_version': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 192679)'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'deleted_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'modified_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 192871)'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'cases'", 'to': "orm['core.Product']"})
},
'library.caseattachment': {
'Meta': {'object_name': 'CaseAttachment'},
'attachment': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'caseversion': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attachments'", 'to': "orm['library.CaseVersion']"}),
'cc_version': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 187537)'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'deleted_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'modified_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 187745)'})
},
'library.casestep': {
'Meta': {'ordering': "['caseversion', 'number']", 'object_name': 'CaseStep'},
'caseversion': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'steps'", 'to': "orm['library.CaseVersion']"}),
'cc_version': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 191525)'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'deleted_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'expected': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instruction': ('django.db.models.fields.TextField', [], {}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'modified_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 191712)'}),
'number': ('django.db.models.fields.IntegerField', [], {})
},
'library.caseversion': {
'Meta': {'ordering': "['case', 'productversion__order']", 'object_name': 'CaseVersion'},
'case': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'versions'", 'to': "orm['library.Case']"}),
'cc_version': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 198592)'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'deleted_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'caseversion'", 'symmetrical': 'False', 'to': "orm['environments.Environment']"}),
'envs_narrowed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latest': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'modified_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 198795)'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'productversion': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'caseversions'", 'to': "orm['core.ProductVersion']"}),
'status': ('django.db.models.fields.CharField', [], {'default': "'draft'", 'max_length': '30', 'db_index': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'caseversions'", 'blank': 'True', 'to': "orm['tags.Tag']"})
},
'library.suite': {
'Meta': {'object_name': 'Suite'},
'cases': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'suites'", 'symmetrical': 'False', 'through': "orm['library.SuiteCase']", 'to': "orm['library.Case']"}),
'cc_version': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 194131)'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'deleted_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'modified_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 194340)'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'suites'", 'to': "orm['core.Product']"}),
'status': ('django.db.models.fields.CharField', [], {'default': "'draft'", 'max_length': '30', 'db_index': 'True'})
},
'library.suitecase': {
'Meta': {'ordering': "['order']", 'object_name': 'SuiteCase'},
'case': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'suitecases'", 'to': "orm['library.Case']"}),
'cc_version': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 195643)'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'deleted_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'modified_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 195852)'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'suite': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'suitecases'", 'to': "orm['library.Suite']"})
},
'tags.tag': {
'Meta': {'object_name': 'Tag'},
'cc_version': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 188495)'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'deleted_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'modified_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 1, 12, 188686)'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Product']", 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['library']
| 91.875445
| 199
| 0.571368
| 2,745
| 25,817
| 5.261202
| 0.064481
| 0.099155
| 0.172552
| 0.246503
| 0.864769
| 0.82904
| 0.793657
| 0.784448
| 0.752112
| 0.683562
| 0
| 0.024643
| 0.173219
| 25,817
| 280
| 200
| 92.203571
| 0.651956
| 0.017547
| 0
| 0.387097
| 0
| 0
| 0.61499
| 0.312939
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008065
| false
| 0.004032
| 0.016129
| 0
| 0.03629
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3fe075a57f2da0fcb6bf36e670e78e8226b8fece
| 21,071
|
py
|
Python
|
django_facebook/south_migrations/0001_initial.py
|
abendleiter/Django-facebook
|
5314fea1d7b95b45071c982234e0c1364453ab64
|
[
"BSD-3-Clause"
] | null | null | null |
django_facebook/south_migrations/0001_initial.py
|
abendleiter/Django-facebook
|
5314fea1d7b95b45071c982234e0c1364453ab64
|
[
"BSD-3-Clause"
] | null | null | null |
django_facebook/south_migrations/0001_initial.py
|
abendleiter/Django-facebook
|
5314fea1d7b95b45071c982234e0c1364453ab64
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from django.conf import settings
'''
Support for Django custom user models
See this blog post for inspiration
http://kevindias.com/writing/django-custom-user-models-south-and-reusable-apps/
https://github.com/stephenmcd/mezzanine/blob/master/mezzanine/core/migrations/0005_auto__chg_field_sitepermission_user__del_unique_sitepermission_user.py
'''
from django_facebook.utils import get_migration_data
# Resolve the user model to migrate against; get_migration_data() is a
# project helper (django_facebook.utils), so the exact return semantics
# can't be confirmed from this file — presumably the model class, its
# "app.Model" label, and the ORM label used in orm[...] lookups below.
User, user_model_label, user_orm_label = get_migration_data()
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'FacebookUser'
db.create_table('django_facebook_facebookuser', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user_id', self.gf('django.db.models.fields.IntegerField')()),
('facebook_id', self.gf('django.db.models.fields.BigIntegerField')()),
('name', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('gender', self.gf('django.db.models.fields.CharField')(max_length=1, null=True, blank=True)),
))
db.send_create_signal('django_facebook', ['FacebookUser'])
# Adding unique constraint on 'FacebookUser', fields ['user_id', 'facebook_id']
db.create_unique('django_facebook_facebookuser', ['user_id', 'facebook_id'])
# Adding model 'FacebookLike'
db.create_table('django_facebook_facebooklike', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user_id', self.gf('django.db.models.fields.IntegerField')()),
('facebook_id', self.gf('django.db.models.fields.BigIntegerField')()),
('name', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('category', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('created_time', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
))
db.send_create_signal('django_facebook', ['FacebookLike'])
# Adding unique constraint on 'FacebookLike', fields ['user_id', 'facebook_id']
db.create_unique('django_facebook_facebooklike', ['user_id', 'facebook_id'])
# Adding model 'FacebookProfile'
db.create_table('django_facebook_facebookprofile', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('about_me', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('facebook_id', self.gf('django.db.models.fields.BigIntegerField')(unique=True, null=True, blank=True)),
('access_token', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('facebook_name', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('facebook_profile_url', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('website_url', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('blog_url', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('date_of_birth', self.gf('django.db.models.fields.DateField')(null=True, blank=True)),
('gender', self.gf('django.db.models.fields.CharField')(max_length=1, null=True, blank=True)),
('raw_data', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('facebook_open_graph', self.gf('django.db.models.fields.NullBooleanField')(null=True, blank=True)),
('new_token_required', self.gf('django.db.models.fields.BooleanField')(default=False)),
('image', self.gf('django.db.models.fields.files.ImageField')(max_length=255, null=True, blank=True)),
('user', self.gf('django.db.models.fields.related.OneToOneField')(to=orm[user_orm_label], unique=True)),
))
db.send_create_signal('django_facebook', ['FacebookProfile'])
if getattr(settings, 'AUTH_USER_MODEL', None) == 'django_facebook.FacebookCustomUser':
# Adding model 'FacebookCustomUser'
db.create_table('django_facebook_facebookcustomuser', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('password', self.gf('django.db.models.fields.CharField')(max_length=128)),
('last_login', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
('is_superuser', self.gf('django.db.models.fields.BooleanField')(default=False)),
('username', self.gf('django.db.models.fields.CharField')(unique=True, max_length=30)),
('first_name', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
('last_name', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
('email', self.gf('django.db.models.fields.EmailField')(max_length=75, blank=True)),
('is_staff', self.gf('django.db.models.fields.BooleanField')(default=False)),
('is_active', self.gf('django.db.models.fields.BooleanField')(default=True)),
('date_joined', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
('about_me', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('facebook_id', self.gf('django.db.models.fields.BigIntegerField')(unique=True, null=True, blank=True)),
('access_token', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('facebook_name', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('facebook_profile_url', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('website_url', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('blog_url', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('date_of_birth', self.gf('django.db.models.fields.DateField')(null=True, blank=True)),
('gender', self.gf('django.db.models.fields.CharField')(max_length=1, null=True, blank=True)),
('raw_data', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('facebook_open_graph', self.gf('django.db.models.fields.NullBooleanField')(null=True, blank=True)),
('new_token_required', self.gf('django.db.models.fields.BooleanField')(default=False)),
('image', self.gf('django.db.models.fields.files.ImageField')(max_length=255, null=True, blank=True)),
('state', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
))
db.send_create_signal('django_facebook', ['FacebookCustomUser'])
# Adding M2M table for field groups on 'FacebookCustomUser'
m2m_table_name = db.shorten_name('django_facebook_facebookcustomuser_groups')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('facebookcustomuser', models.ForeignKey(orm['django_facebook.facebookcustomuser'], null=False)),
('group', models.ForeignKey(orm['auth.group'], null=False))
))
db.create_unique(m2m_table_name, ['facebookcustomuser_id', 'group_id'])
# Adding M2M table for field user_permissions on 'FacebookCustomUser'
m2m_table_name = db.shorten_name('django_facebook_facebookcustomuser_user_permissions')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('facebookcustomuser', models.ForeignKey(orm['django_facebook.facebookcustomuser'], null=False)),
('permission', models.ForeignKey(orm['auth.permission'], null=False))
))
db.create_unique(m2m_table_name, ['facebookcustomuser_id', 'permission_id'])
# Adding model 'OpenGraphShare'
db.create_table('django_facebook_open_graph_share', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm[user_orm_label])),
('action_domain', self.gf('django.db.models.fields.CharField')(max_length=255)),
('facebook_user_id', self.gf('django.db.models.fields.BigIntegerField')()),
('share_dict', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'], null=True, blank=True)),
('object_id', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)),
('error_message', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('last_attempt', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, blank=True)),
('retry_count', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('share_id', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('completed_at', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
('removed_at', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
('updated_at', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, db_index=True, blank=True)),
))
db.send_create_signal('django_facebook', ['OpenGraphShare'])
def backwards(self, orm):
    """Reverse this migration: drop every constraint and table that
    forwards() created.

    ``orm`` is South's frozen ORM snapshot (built from ``models`` below);
    the actual schema operations go through the global ``db`` backend.
    """
    # Unique constraints are removed first, before the tables that carry them.
    # Removing unique constraint on 'FacebookLike', fields ['user_id', 'facebook_id']
    db.delete_unique('django_facebook_facebooklike', ['user_id', 'facebook_id'])
    # Removing unique constraint on 'FacebookUser', fields ['user_id', 'facebook_id']
    db.delete_unique('django_facebook_facebookuser', ['user_id', 'facebook_id'])
    # Deleting model 'FacebookUser'
    db.delete_table('django_facebook_facebookuser')
    # Deleting model 'FacebookLike'
    db.delete_table('django_facebook_facebooklike')
    # Deleting model 'FacebookProfile'
    db.delete_table('django_facebook_facebookprofile')
    # Deleting model 'FacebookCustomUser'
    db.delete_table('django_facebook_facebookcustomuser')
    # Removing M2M table for field groups on 'FacebookCustomUser'.
    # shorten_name mirrors the name used by create_table in forwards().
    db.delete_table(db.shorten_name('django_facebook_facebookcustomuser_groups'))
    # Removing M2M table for field user_permissions on 'FacebookCustomUser'
    db.delete_table(db.shorten_name('django_facebook_facebookcustomuser_user_permissions'))
    # Deleting model 'OpenGraphShare'
    db.delete_table('django_facebook_open_graph_share')
# South ORM freeze: a declarative snapshot of every model this migration
# touches, used by South to build the fake ORM handed to forwards()/backwards().
# NOTE(review): user_model_label, User and user_orm_label come from
# module-level code not visible in this chunk — presumably derived from the
# configurable user model; confirm against the top of the file.
models = {
    'auth.group': {
        'Meta': {'object_name': 'Group'},
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
        'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
    },
    'auth.permission': {
        'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
        'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
        'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
    },
    'contenttypes.contenttype': {
        'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
        'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
        'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
    },
    # The key and table name are taken from the swappable user model so the
    # freeze matches whatever AUTH_USER_MODEL is configured.
    user_model_label: {
        'Meta': {'object_name': User.__name__, 'db_table': "'%s'" % User._meta.db_table},
        'about_me': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
        'access_token': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
        'blog_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
        'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
        'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
        'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
        'facebook_id': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True', 'null': 'True', 'blank': 'True'}),
        'facebook_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
        'facebook_open_graph': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
        'facebook_profile_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
        'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
        'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
        'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': "orm['auth.Group']"}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
        'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
        'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
        'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
        'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
        'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
        'new_token_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
        'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
        'raw_data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
        'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
        'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': "orm['auth.Permission']"}),
        'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
        'website_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
    },
    'django_facebook.facebooklike': {
        'Meta': {'unique_together': "(['user_id', 'facebook_id'],)", 'object_name': 'FacebookLike'},
        'category': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
        'created_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
        'facebook_id': ('django.db.models.fields.BigIntegerField', [], {}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'name': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
        'user_id': ('django.db.models.fields.IntegerField', [], {})
    },
    'django_facebook.facebookprofile': {
        'Meta': {'object_name': 'FacebookProfile'},
        'about_me': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
        'access_token': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
        'blog_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
        'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
        'facebook_id': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True', 'null': 'True', 'blank': 'True'}),
        'facebook_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
        'facebook_open_graph': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
        'facebook_profile_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
        'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
        'new_token_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
        'raw_data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
        'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['%s']" % user_orm_label, 'unique': 'True'}),
        'website_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
    },
    'django_facebook.facebookuser': {
        'Meta': {'unique_together': "(['user_id', 'facebook_id'],)", 'object_name': 'FacebookUser'},
        'facebook_id': ('django.db.models.fields.BigIntegerField', [], {}),
        'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'name': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
        'user_id': ('django.db.models.fields.IntegerField', [], {})
    },
    'django_facebook.opengraphshare': {
        'Meta': {'object_name': 'OpenGraphShare', 'db_table': "'django_facebook_open_graph_share'"},
        'action_domain': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
        'completed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
        'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
        'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
        'error_message': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
        'facebook_user_id': ('django.db.models.fields.BigIntegerField', [], {}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'last_attempt': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
        'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
        'removed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
        'retry_count': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
        'share_dict': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
        'share_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
        'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
        'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_orm_label})
    }
}
# Apps whose frozen models above are considered complete by South.
complete_apps = ['django_facebook']
| 76.621818
| 194
| 0.616772
| 2,366
| 21,071
| 5.329248
| 0.079459
| 0.092632
| 0.160996
| 0.229994
| 0.841542
| 0.813942
| 0.795543
| 0.770799
| 0.717503
| 0.659291
| 0
| 0.005889
| 0.178017
| 21,071
| 274
| 195
| 76.90146
| 0.722113
| 0.04295
| 0
| 0.412556
| 0
| 0
| 0.500328
| 0.31834
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008969
| false
| 0.008969
| 0.026906
| 0
| 0.049327
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b745a9bcedd8d9dcda3b1442f312040d0cb0e93d
| 2,373
|
py
|
Python
|
web/test/test_util.py
|
epmoyer/cascade
|
79b877d5b19567be2d08c00f5cdc31c8968db4c7
|
[
"MIT"
] | null | null | null |
web/test/test_util.py
|
epmoyer/cascade
|
79b877d5b19567be2d08c00f5cdc31c8968db4c7
|
[
"MIT"
] | null | null | null |
web/test/test_util.py
|
epmoyer/cascade
|
79b877d5b19567be2d08c00f5cdc31c8968db4c7
|
[
"MIT"
] | null | null | null |
import pytest
from cascade.util import make_json, get_requirement_id
# Fixed: the '[ABC-DEF-123, X, GUI-796]' positive case was listed twice;
# the duplicate row has been removed.
@pytest.mark.parametrize("req_text, req_id", [
    # Positives
    ('[ABC-DEF-123,X]', 'ABC-DEF-123'),
    ('[ABC-DEF-123, X, GUI-796]', 'ABC-DEF-123'),
    (' [ ABC-DEF-123 , X, GUI-796, X, X, X]', 'ABC-DEF-123'),
    (' [ ABC-DEF-1 , X, GUI-796]', 'ABC-DEF-1'),
    (' [ ABC-DEF-123,2xy] ', 'ABC-DEF-123'),
    (' [ ABC-DEF-123, 2xy] ', 'ABC-DEF-123'),
    (' \t[ \tABC-DEF-123,\t 2xy \t]\t ', 'ABC-DEF-123'),
    # Negatives
    ('[ABC-DEF-,X]', None),
    ('[ABC-DEF-?,X]', None),
    ('[ABC-DEF-??,X]', None),
    ('[ABC-DEF-G,X]', None),
    ('[ABC-DEF-GH,X]', None),
    ('[ABC]', None),
    ('[ABC-DEF]', None),
    ('[ABC-DEF-123]', None),
    ('[ABC-DEF-123,]', None),
    ('[ SRD-RCN-art-09-796]', None),
    ('[ SRD-RCN-123,2wf', None),
    ('[ SRD-RCN-123,2wf]abc', None),
    ('[ SRD-RCN-123,2wf] abc', None),
    ('[ABC-DEF-123,X]\nLine2\nLine3', None),
    ('Line1\n[ABC-DEF-123,X]\nLine3', None),
])
def test_get_requirement_id_strict(req_text, req_id):
    """Strict mode: an id is extracted only from a single-line, well-formed
    ``[PREFIX-NAME-digits, ...]`` tag; everything else yields None."""
    assert get_requirement_id(req_text, fuzzy=False) == req_id
# Fixed: the '[ABC-DEF-123, X, GUI-796]' positive case was listed twice;
# the duplicate row has been removed.
@pytest.mark.parametrize("req_text, req_id", [
    # Positives
    ('[ABC-DEF-123,X]', 'ABC-DEF-123'),
    ('[ABC-DEF-123, X, GUI-796]', 'ABC-DEF-123'),
    (' [ ABC-DEF-123 , X, GUI-796, X, X, X]', 'ABC-DEF-123'),
    (' [ ABC-DEF-1 , X, GUI-796]', 'ABC-DEF-1'),
    (' [ ABC-DEF-123,2xy] ', 'ABC-DEF-123'),
    (' [ ABC-DEF-123, 2xy] ', 'ABC-DEF-123'),
    (' \t[ \tABC-DEF-123,\t 2xy \t]\t ', 'ABC-DEF-123'),
    # Fuzzy mode also accepts malformed trailing id components
    ('[ABC-DEF-,X]', 'ABC-DEF-'),
    ('[ABC-DEF-?,X]', 'ABC-DEF-?'),
    ('[ABC-DEF-??,X]', 'ABC-DEF-??'),
    ('[ABC-DEF-G,X]', 'ABC-DEF-G'),
    ('[ABC-DEF-GH,X]', 'ABC-DEF-GH'),
    # Negatives
    ('[ABC]', None),
    ('[ABC-DEF]', None),
    ('[ABC-DEF-123]', None),
    ('[ABC-DEF-123,]', None),
    ('[ SRD-RCN-art-09-796]', None),
    ('[ SRD-RCN-123,2wf', None),
    ('[ SRD-RCN-123,2wf]abc', None),
    ('[ SRD-RCN-123,2wf] abc', None),
    ('[ABC-DEF-123,X]\nLine2\nLine3', None),
    ('Line1\n[ABC-DEF-123,X]\nLine3', None),
])
def test_get_requirement_id_fuzzy(req_text, req_id):
    """Fuzzy mode: like strict, but a malformed final component (missing or
    non-numeric digits) is still returned verbatim instead of rejected."""
    assert get_requirement_id(req_text, fuzzy=True) == req_id
| 34.391304
| 62
| 0.502739
| 368
| 2,373
| 3.168478
| 0.122283
| 0.283019
| 0.262436
| 0.102916
| 0.862779
| 0.862779
| 0.862779
| 0.862779
| 0.862779
| 0.862779
| 0
| 0.098396
| 0.211968
| 2,373
| 69
| 63
| 34.391304
| 0.525134
| 0.016435
| 0
| 0.724138
| 0
| 0
| 0.507515
| 0.051282
| 0
| 0
| 0
| 0
| 0.034483
| 1
| 0.034483
| false
| 0
| 0.034483
| 0
| 0.068966
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b78c551e28ee1e4ffb39988c516835f3ab7015a6
| 92
|
py
|
Python
|
job-template/job/pkgs/exceptions/__init__.py
|
jollyshuai/cube-studio
|
02ee737801f37a78a1b2e49c844c8401b41d9c48
|
[
"Apache-2.0"
] | 1
|
2022-03-19T14:10:26.000Z
|
2022-03-19T14:10:26.000Z
|
job-template/job/pkgs/exceptions/__init__.py
|
jollyshuai/cube-studio
|
02ee737801f37a78a1b2e49c844c8401b41d9c48
|
[
"Apache-2.0"
] | null | null | null |
job-template/job/pkgs/exceptions/__init__.py
|
jollyshuai/cube-studio
|
02ee737801f37a78a1b2e49c844c8401b41d9c48
|
[
"Apache-2.0"
] | null | null | null |
from .tdw_exceptions import TDWFailedException
from .tdw_exceptions import TDWNoResException
| 46
| 46
| 0.902174
| 10
| 92
| 8.1
| 0.6
| 0.17284
| 0.419753
| 0.567901
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076087
| 92
| 2
| 47
| 46
| 0.952941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4d500db2f300578873683a115b79d0e545d04a3b
| 15,769
|
py
|
Python
|
constants.py
|
momennaas/kalam-lp
|
fdf032ca71a155169f507cba40275ca38f409c87
|
[
"MIT"
] | 6
|
2019-03-31T04:46:27.000Z
|
2020-02-27T16:39:31.000Z
|
constants.py
|
momennaas/kalam-lp
|
fdf032ca71a155169f507cba40275ca38f409c87
|
[
"MIT"
] | null | null | null |
constants.py
|
momennaas/kalam-lp
|
fdf032ca71a155169f507cba40275ca38f409c87
|
[
"MIT"
] | null | null | null |
# -*- encoding: utf-8 -*-
##############################################################
## Author: Abdulmumen Naas
## Description: Arabic Natural Language Processor (Kalam-lp)
## Version: 0.0.1
## Copyright (c) 2014 Abdulmumen Naas
#############################################################
from alefba import *
# Constants: closed-class Arabic particle/pronoun letter sequences, built by
# concatenating the alefba letter constants (star-imported above).
# Demonstrative pronouns
DEM = (ARABIC_LETTER_THAL+ARABIC_LETTER_ALEF,
       ARABIC_LETTER_HEH+ARABIC_LETTER_THAL+ARABIC_LETTER_ALEF,
       ARABIC_LETTER_HEH+ARABIC_LETTER_THAL+ARABIC_LETTER_HEH,
       ARABIC_LETTER_HEH+ARABIC_LETTER_THAL+ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,
       ARABIC_LETTER_HEH+ARABIC_LETTER_THAL+ARABIC_LETTER_YEH+ARABIC_LETTER_NOON,
       ARABIC_LETTER_HEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_TEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,
       ARABIC_LETTER_HEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_TEH+ARABIC_LETTER_YEH+ARABIC_LETTER_NOON,
       ARABIC_LETTER_HEH+ARABIC_LETTER_WAW_WITH_HAMZA_ABOVE+ARABIC_LETTER_LAM+ARABIC_LETTER_ALEF+ARABIC_LETTER_HAMZA,
       )
# Locative demonstratives (here/there)
DEMLOC = (ARABIC_LETTER_HEH+ARABIC_LETTER_NOON+ARABIC_LETTER_ALEF,
          ARABIC_LETTER_HEH+ARABIC_LETTER_NOON+ARABIC_LETTER_ALEF+ARABIC_LETTER_KAF,
          ARABIC_LETTER_HEH+ARABIC_LETTER_NOON+ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM+ARABIC_LETTER_KAF)
# Relative nouns
REL = (ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM+ARABIC_LETTER_THAL+ARABIC_LETTER_YEH,
       ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM+ARABIC_LETTER_TEH+ARABIC_LETTER_YEH,
       ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM+ARABIC_LETTER_THAL+ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,
       ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM+ARABIC_LETTER_LAM+ARABIC_LETTER_TEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,
       ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM+ARABIC_LETTER_LAM+ARABIC_LETTER_TEH+ARABIC_LETTER_YEH+ARABIC_LETTER_NOON,
       ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM+ARABIC_LETTER_LAM+ARABIC_LETTER_THAL+ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,
       ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM+ARABIC_LETTER_LAM+ARABIC_LETTER_ALEF+ARABIC_LETTER_TEH+ARABIC_LETTER_YEH,
       ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM+ARABIC_LETTER_LAM+ARABIC_LETTER_ALEF+ARABIC_LETTER_YEH_WITH_HAMZA_ABOVE+ARABIC_LETTER_YEH,)
# Personal pronouns — 1st, 2nd and 3rd person
PPRON1 = (ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON+ARABIC_LETTER_ALEF,
          ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_NOON+ARABIC_LETTER_ALEF,
          ARABIC_LETTER_NOON+ARABIC_LETTER_HAH+ARABIC_LETTER_NOON)
PPRON2 = (ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_NOON+ARABIC_LETTER_TEH,
          ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON+ARABIC_LETTER_TEH,
          ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_NOON+ARABIC_LETTER_TEH+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
          ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON+ARABIC_LETTER_TEH+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
          ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_NOON+ARABIC_LETTER_TEH+ARABIC_LETTER_MEEM,
          ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON+ARABIC_LETTER_TEH+ARABIC_LETTER_MEEM,)
PPRON3 = (ARABIC_LETTER_HEH+ARABIC_LETTER_WAW,
          ARABIC_LETTER_HEH+ARABIC_LETTER_YEH,
          ARABIC_LETTER_HEH+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
          ARABIC_LETTER_HEH+ARABIC_LETTER_MEEM,
          ARABIC_LETTER_HEH+ARABIC_LETTER_NOON,)
# Possessive pronoun suffixes (all persons combined)
POSPRON = (ARABIC_LETTER_YEH,
           ARABIC_LETTER_KAF,
           ARABIC_LETTER_HEH,
           ARABIC_LETTER_HEH+ARABIC_LETTER_ALEF,
           ARABIC_LETTER_NOON+ARABIC_LETTER_ALEF,
           ARABIC_LETTER_KAF+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
           ARABIC_LETTER_HEH+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
           ARABIC_LETTER_KAF+ARABIC_LETTER_MEEM,
           ARABIC_LETTER_KAF+ARABIC_LETTER_NOON,
           ARABIC_LETTER_HEH+ARABIC_LETTER_NOON,
           ARABIC_LETTER_HEH+ARABIC_LETTER_MEEM)
POSPRON1 = (ARABIC_LETTER_YEH,
            ARABIC_LETTER_NOON+ARABIC_LETTER_ALEF)
# NOTE(review): the first two POSPRON2 entries are identical (KAF+MEEM+ALEF
# twice) — looks like a copy-paste slip; confirm the intended second form.
POSPRON2 = (ARABIC_LETTER_KAF+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
            ARABIC_LETTER_KAF+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
            ARABIC_LETTER_KAF+ARABIC_LETTER_MEEM,
            ARABIC_LETTER_KAF+ARABIC_LETTER_NOON,)
POSPRON3 = (ARABIC_LETTER_HEH,
            ARABIC_LETTER_HEH+ARABIC_LETTER_ALEF,
            ARABIC_LETTER_HEH+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
            ARABIC_LETTER_HEH+ARABIC_LETTER_NOON,
            ARABIC_LETTER_HEH+ARABIC_LETTER_MEEM)
# Case-ending suffixes: singular feminine, dual m/f, plural m/f
CE_SF = (ARABIC_LETTER_TEH_MARBUTA,)
CE_DM = (ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,)
CE_DF = (ARABIC_LETTER_TEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,)
CE_PM = (ARABIC_LETTER_YEH+ARABIC_LETTER_NOON,)
CE_PF = (ARABIC_LETTER_ALEF+ARABIC_LETTER_TEH,)
# Prepositions (ARABIC_PREP_* are pre-built strings from alefba)
PREP = (ARABIC_PREP_MEN,
        ARABIC_PREP_ELA,
        ARABIC_PREP_HATTA,
        ARABIC_PREP_KHALA,
        ARABIC_PREP_HASHA,
        ARABIC_PREP_ADA,
        ARABIC_PREP_FE,
        ARABIC_PREP_AN,
        ARABIC_PREP_ALA,
        ARABIC_PREP_MUTH,
        ARABIC_PREP_MUNTHO,
        ARABIC_PREP_KAY,
        ARABIC_PREP_WAW,
        ARABIC_PREP_TA,
        ARABIC_PREP_KAF,
        ARABIC_PREP_BA,
        ARABIC_PREP_LALLA,
        ARABIC_PREP_MATA)
# Single-letter verb prefixes that can stand alone
PVSOLO = (ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE,
          ARABIC_LETTER_ALEF,
          ARABIC_LETTER_TEH,
          ARABIC_LETTER_NOON,
          ARABIC_LETTER_YEH,)
# Kana and sisters (incomplete-verb particles)
KANA = (ARABIC_LETTER_KAF+ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,
        ARABIC_LETTER_LAM+ARABIC_LETTER_YEH+ARABIC_LETTER_SEEN,
        ARABIC_LETTER_SAD+ARABIC_LETTER_ALEF+ARABIC_LETTER_REH,
        ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_SAD+ARABIC_LETTER_BEH+ARABIC_LETTER_HAH,
        ARABIC_LETTER_ALEF+ARABIC_LETTER_SAD+ARABIC_LETTER_BEH+ARABIC_LETTER_HAH,
        ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_DAD+ARABIC_LETTER_HAH+ARABIC_LETTER_ALEF_MAKSURA,
        # NOTE(review): this bare-alef variant uses HEH where the line above
        # uses HAH — possible typo; confirm against the intended word.
        ARABIC_LETTER_ALEF+ARABIC_LETTER_DAD+ARABIC_LETTER_HEH+ARABIC_LETTER_ALEF_MAKSURA,
        ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_MEEM+ARABIC_LETTER_SEEN+ARABIC_LETTER_ALEF_MAKSURA,
        ARABIC_LETTER_ALEF+ARABIC_LETTER_MEEM+ARABIC_LETTER_SEEN+ARABIC_LETTER_ALEF_MAKSURA,
        ARABIC_LETTER_ZAH+ARABIC_LETTER_LAM,
        ARABIC_LETTER_BEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_TEH,)
# Conjunctions
CONJ = (ARABIC_LETTER_WAW,
        ARABIC_LETTER_THEH+ARABIC_LETTER_MEEM,
        ARABIC_LETTER_ALEF+ARABIC_LETTER_WAW,
        ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_WAW,
        ARABIC_LETTER_FEH)
# Accusative particles (inna and sisters)
ACC = (ARABIC_LETTER_ALEF_WITH_HAMZA_BELOW+ARABIC_LETTER_NOON,
       ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_NOON,
       ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,
       ARABIC_PREP_LALLA,
       ARABIC_LETTER_LAM+ARABIC_LETTER_KAF+ARABIC_LETTER_NOON,
       ARABIC_LETTER_KAF+ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_NOON,
       ARABIC_LETTER_LAM+ARABIC_LETTER_YEH+ARABIC_LETTER_TEH)
# Exception particles
EXP = (ARABIC_LETTER_ALEF_WITH_HAMZA_BELOW+ARABIC_LETTER_LAM+ARABIC_LETTER_ALEF,
       ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM+ARABIC_LETTER_ALEF,
       # NOTE(review): GHAIN and YEH+REH are listed as two separate entries;
       # presumably GHAIN+YEH+REH ("ghayr") was intended as one — confirm.
       ARABIC_LETTER_GHAIN,ARABIC_LETTER_YEH+ARABIC_LETTER_REH,
       ARABIC_PREP_KHALA,
       ARABIC_PREP_ADA,
       ARABIC_PREP_HASHA,
       ARABIC_LETTER_LAM+ARABIC_LETTER_YEH+ARABIC_LETTER_SEEN,)
# Interrogative particles (currently empty)
INTG = ()
# Negative particles
NEG = (
    ARABIC_LETTER_LAM+ARABIC_LETTER_MEEM,
    ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,)
# Conditional particles
COND = (ARABIC_LETTER_LAM+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
        ARABIC_LETTER_ALEF_WITH_HAMZA_BELOW+ARABIC_LETTER_NOON,
        ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,
        ARABIC_LETTER_MEEM+ARABIC_LETTER_NOON,
        ARABIC_LETTER_MEEM+ARABIC_LETTER_HEH+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
        ARABIC_PREP_MATA,
        ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_YEH+ARABIC_LETTER_NOON,
        ARABIC_LETTER_ALEF+ARABIC_LETTER_YEH+ARABIC_LETTER_NOON,
        ARABIC_LETTER_KAF+ARABIC_LETTER_YEH+ARABIC_LETTER_FEH,
        ARABIC_LETTER_HAH+ARABIC_LETTER_YEH+ARABIC_LETTER_THEH+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
        ARABIC_LETTER_ALEF_WITH_HAMZA_BELOW+ARABIC_LETTER_THAL+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
        ARABIC_LETTER_ALEF+ARABIC_LETTER_THAL+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
        ARABIC_LETTER_ALEF+ARABIC_LETTER_THAL+ARABIC_LETTER_ALEF,
        ARABIC_LETTER_ALEF_WITH_HAMZA_BELOW+ARABIC_LETTER_THAL+ARABIC_LETTER_ALEF,
        ARABIC_LETTER_ALEF+ARABIC_LETTER_THAL,
        ARABIC_LETTER_ALEF_WITH_HAMZA_BELOW+ARABIC_LETTER_THAL,
        ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_YEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,
        ARABIC_LETTER_ALEF+ARABIC_LETTER_YEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,
        ARABIC_LETTER_ALEF+ARABIC_LETTER_YEH+ARABIC_LETTER_YEH+ARABIC_LETTER_NOON,
        ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_YEH+ARABIC_LETTER_YEH+ARABIC_LETTER_NOON,
        ARABIC_LETTER_ALEF+ARABIC_LETTER_YEH+ARABIC_LETTER_YEH,
        ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_YEH+ARABIC_LETTER_YEH
        )
# Vocative particles
VOC = (ARABIC_LETTER_ALEF_WITH_MADDA_ABOVE,
       ARABIC_LETTER_YEH+ARABIC_LETTER_ALEF)
# Words exempt from nunation (tanween) rules, per the original table
TENN = (ARABIC_LETTER_ALEF+ARABIC_LETTER_SEEN+ARABIC_LETTER_MEEM,
        ARABIC_LETTER_ALEF+ARABIC_LETTER_SEEN+ARABIC_LETTER_TEH,
        ARABIC_LETTER_ALEF+ARABIC_LETTER_BEH+ARABIC_LETTER_NOON,
        ARABIC_LETTER_ALEF+ARABIC_LETTER_BEH+ARABIC_LETTER_NOON+ARABIC_LETTER_TEH_MARBUTA,
        ARABIC_LETTER_ALEF+ARABIC_LETTER_BEH+ARABIC_LETTER_NOON+ARABIC_LETTER_MEEM,
        ARABIC_LETTER_ALEF+ARABIC_LETTER_MEEM+ARABIC_LETTER_WAW_WITH_HAMZA_ABOVE,
        ARABIC_LETTER_ALEF+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_TEH_MARBUTA,
        ARABIC_LETTER_ALEF+ARABIC_LETTER_THEH+ARABIC_LETTER_NOON+ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,
        ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON+ARABIC_LETTER_TEH+ARABIC_LETTER_TEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,
        ARABIC_LETTER_ALEF+ARABIC_LETTER_YEH+ARABIC_LETTER_MEEM+ARABIC_LETTER_NOON+" "+ARABIC_WORD_ALLAH)
# ---------------------------------------------------------------------------
# Compiled particle tables: each entry pairs a particle's length with its
# compiled pattern, so the analyzer can both match and know how much to strip.
# NOTE(review): `re` is not imported in this file directly; it appears to be
# re-exported by alefba's star import — confirm.
def _compile_particles(particles):
    """Return [(len(p), compiled UNICODE regex for p)] for each particle."""
    return [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in particles]

DEM_REGX = _compile_particles(DEM)
# BUG FIX: DEMLOC_REGX was previously compiled from DEM (copy-paste error),
# so locative demonstratives were never matched by their own table.
DEMLOC_REGX = _compile_particles(DEMLOC)
REL_REGX = _compile_particles(REL)
PPRON1_REGX = _compile_particles(PPRON1)
PPRON2_REGX = _compile_particles(PPRON2)
PPRON3_REGX = _compile_particles(PPRON3)
POSPRON_REGX = _compile_particles(POSPRON)
PREP_REGX = _compile_particles(PREP)
#PSOLO_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in PSOLO]
PVSOLO_REGX = _compile_particles(PVSOLO)
CONJ_REGX = _compile_particles(CONJ)
ACC_REGX = _compile_particles(ACC)
NEG_REGX = _compile_particles(NEG)
COND_REGX = _compile_particles(COND)
EXP_REGX = _compile_particles(EXP)
VOC_REGX = _compile_particles(VOC)
TENN_REGX = _compile_particles(TENN)
KANA_REGX = _compile_particles(KANA)
# Noun-specific prefixes
# Conjunction + preposition + definite article "al"
PRFX_CONJPAL = (
    ARABIC_LETTER_WAW+ARABIC_LETTER_BEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM,
    ARABIC_LETTER_WAW+ARABIC_LETTER_KAF+ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM,
    ARABIC_LETTER_WAW+ARABIC_LETTER_LAM+ARABIC_LETTER_LAM,
    ARABIC_LETTER_FEH+ARABIC_LETTER_BEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM,
    ARABIC_LETTER_FEH+ARABIC_LETTER_KAF+ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM,
    ARABIC_LETTER_FEH+ARABIC_LETTER_LAM+ARABIC_LETTER_LAM,)
# Conjunction + definite article
PRFX_CONJAL = (
    ARABIC_LETTER_WAW+ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM,
    ARABIC_LETTER_FEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM,
    )
# Preposition + definite article
PRFX_PAL = (ARABIC_LETTER_BEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM,
            ARABIC_LETTER_KAF+ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM,
            ARABIC_LETTER_LAM+ARABIC_LETTER_LAM,)
# Conjunction + preposition
PRFX_CONJP = (ARABIC_LETTER_WAW+ARABIC_LETTER_BEH,
              ARABIC_LETTER_WAW+ARABIC_LETTER_KAF,
              ARABIC_LETTER_WAW+ARABIC_LETTER_LAM,
              ARABIC_LETTER_WAW+ARABIC_LETTER_FEH,
              ARABIC_LETTER_FEH+ARABIC_LETTER_BEH,
              ARABIC_LETTER_FEH+ARABIC_LETTER_KAF,
              ARABIC_LETTER_FEH+ARABIC_LETTER_LAM,
              ARABIC_LETTER_FEH+ARABIC_LETTER_FEH,)
# Definite article alone
PRFX_AL = (ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM,)
# Conjunction alone
PRFX_CONJ = (ARABIC_LETTER_WAW,ARABIC_LETTER_FEH,)
# Preposition alone
PRFX_P = (ARABIC_LETTER_BEH,
          ARABIC_LETTER_KAF,
          ARABIC_LETTER_LAM,)
# Verb-specific prefixes
# Conjunction + seen (future marker) + present-verb prefix
PRFX_CONJSPV = (ARABIC_LETTER_WAW+ARABIC_LETTER_SEEN+ARABIC_LETTER_YEH,
                ARABIC_LETTER_WAW+ARABIC_LETTER_SEEN+ARABIC_LETTER_NOON,
                ARABIC_LETTER_WAW+ARABIC_LETTER_SEEN+ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE,
                ARABIC_LETTER_WAW+ARABIC_LETTER_SEEN+ARABIC_LETTER_TEH,
                ARABIC_LETTER_FEH+ARABIC_LETTER_SEEN+ARABIC_LETTER_YEH,
                ARABIC_LETTER_FEH+ARABIC_LETTER_SEEN+ARABIC_LETTER_NOON,
                ARABIC_LETTER_FEH+ARABIC_LETTER_SEEN+ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE,
                ARABIC_LETTER_FEH+ARABIC_LETTER_SEEN+ARABIC_LETTER_TEH)
# Conjunction + present-verb prefix
PRFX_CONJPV = (ARABIC_LETTER_WAW+ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE,
               ARABIC_LETTER_WAW+ARABIC_LETTER_TEH,
               ARABIC_LETTER_WAW+ARABIC_LETTER_NOON,
               ARABIC_LETTER_WAW+ARABIC_LETTER_YEH,
               ARABIC_LETTER_FEH+ARABIC_LETTER_ALEF,
               ARABIC_LETTER_FEH+ARABIC_LETTER_TEH,
               ARABIC_LETTER_FEH+ARABIC_LETTER_NOON,
               ARABIC_LETTER_FEH+ARABIC_LETTER_YEH,)
# Seen (future marker) + present-verb prefix
PRFX_SEENPV = (ARABIC_LETTER_SEEN+ARABIC_LETTER_NOON,
               ARABIC_LETTER_SEEN+ARABIC_LETTER_YEH,
               ARABIC_LETTER_SEEN+ARABIC_LETTER_TEH,
               ARABIC_LETTER_SEEN+ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE,)
# Seen alone
PRFX_SEEN = (ARABIC_LETTER_SEEN,)
# Present-verb prefix alone
PRFX_PV = (ARABIC_LETTER_TEH,
           ARABIC_LETTER_YEH,
           ARABIC_LETTER_NOON,
           ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE,)
# "Sawfa" future particle
PRFX_SAWFA = (ARABIC_LETTER_SEEN+ARABIC_LETTER_WAW+ARABIC_LETTER_FEH,)
# ------------ Prefixes Regex ----------------------
# Each table pairs the prefix's length with its compiled pattern so the
# stemmer can strip the matched prefix by length.
### Noun-specific ###
CONJPAL_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_CONJPAL]
CONJAL_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_CONJAL]
AL_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_AL]
CONJP_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_CONJP]
PAL_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_PAL]
# NOTE(review): this rebinds CONJ_REGX, which the file already built earlier
# from the CONJ conjunction tuple; the prefix-based table silently wins.
# Looks like an unintended name collision — confirm which one consumers need.
CONJ_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_CONJ]
P_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_P]
### Verb-specific ###
CONJSPV_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_CONJSPV]
CONJPV_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_CONJPV]
SEENPV_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_SEENPV]
PV_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_PV]
SEEN_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_SEEN]
SAWFA_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_SAWFA]
# ------------ Suffixes Regex --------------------------
### Possessive pronouns ###
POSPRON1_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in POSPRON1]
POSPRON2_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in POSPRON2]
POSPRON3_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in POSPRON3]
### Case endings ###
CE_SF_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in CE_SF]
CE_DM_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in CE_DM]
CE_DF_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in CE_DF]
CE_PM_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in CE_PM]
CE_PF_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in CE_PF]
| 45.443804
| 133
| 0.782041
| 2,393
| 15,769
| 4.648558
| 0.061011
| 0.528587
| 0.185545
| 0.185904
| 0.876483
| 0.845739
| 0.816523
| 0.804387
| 0.648418
| 0.62073
| 0
| 0.001869
| 0.117826
| 15,769
| 346
| 134
| 45.575145
| 0.797786
| 0.04382
| 0
| 0.110204
| 0
| 0
| 0.005295
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.004082
| 0
| 0.004082
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4d7dcd1fa6464ebbccd2436d5dd6a31686677319
| 6,436
|
py
|
Python
|
lib/datasets/__init__.py
|
CFM-MSG/Code_LEORN
|
fabea1e1ded973a4db692e51e2df442bde55f626
|
[
"MIT"
] | 1
|
2022-01-31T03:23:37.000Z
|
2022-01-31T03:23:37.000Z
|
lib/datasets/__init__.py
|
CFM-MSG/Code_LEORN
|
fabea1e1ded973a4db692e51e2df442bde55f626
|
[
"MIT"
] | null | null | null |
lib/datasets/__init__.py
|
CFM-MSG/Code_LEORN
|
fabea1e1ded973a4db692e51e2df442bde55f626
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
from core.config import config
def collate_fn(batch):
    """Collate dataloader samples into one batch dict.

    Variable-length text/visual fields are zero-padded with
    ``pad_sequence``; the square 2D ``map_gt`` of every sample is embedded
    into a zero tensor sized to the largest clip count in the batch.
    Scalar/opaque fields are passed through as plain lists.
    """
    def column(key):
        # Gather one field across all samples, preserving batch order.
        return [sample[key] for sample in batch]

    word_vectors = column('word_vectors')
    txt_masks = column('txt_mask')
    map_gts = column('map_gt')
    vis_feats = column('visual_input')

    # Zero-pad each (n, n) map_gt up to the batch-wide maximum n.
    max_clips = max(gt.shape[-1] for gt in map_gts)
    padded_map_gt = torch.zeros(len(map_gts), 1, max_clips, max_clips)  # B x 1 x T x T
    for row, gt in enumerate(map_gts):
        clips = gt.shape[-1]
        padded_map_gt[row][0, :clips, :clips] = gt

    return {
        'batch_anno_idxs': column('anno_idx'),
        'batch_word_vectors': nn.utils.rnn.pad_sequence(word_vectors, batch_first=True),
        'batch_txt_mask': nn.utils.rnn.pad_sequence(txt_masks, batch_first=True),
        'batch_map_gt': padded_map_gt,
        'batch_vis_input': nn.utils.rnn.pad_sequence(vis_feats, batch_first=True).float(),
        'batch_duration': column('duration'),
        'batch_reg_gt': column('reg_gt'),
        'batch_description': column('description'),
    }
def orcnn_collate_fn(batch):
    """Collate samples carrying object-RCNN features (no raw visual stream).

    Like ``collate_fn`` but pads the RCNN feature/mask/bbox sequences and,
    unlike the other collators, also pads ``reg_gt`` into a float tensor.
    """
    word_vectors = [sample['word_vectors'] for sample in batch]
    txt_masks = [sample['txt_mask'] for sample in batch]
    map_gts = [sample['map_gt'] for sample in batch]
    rcnn_feats = [sample['rcnn_input'] for sample in batch]
    rcnn_masks = [sample['rcnn_mask'] for sample in batch]
    rcnn_bboxes = [sample['rcnn_bbox'] for sample in batch]
    reg_gts = [sample['reg_gt'] for sample in batch]

    # Zero-pad the square map ground-truths up to the batch maximum size.
    num_clips_max = max(gt.shape[-1] for gt in map_gts)
    map_gt_padded = torch.zeros(len(map_gts), 1, num_clips_max, num_clips_max)  # batchsize * 1 * 16 * 16
    for idx, gt in enumerate(map_gts):
        n = gt.shape[-1]
        map_gt_padded[idx][0, :n, :n] = gt

    return {
        'batch_anno_idxs': [sample['anno_idx'] for sample in batch],
        'batch_word_vectors': nn.utils.rnn.pad_sequence(word_vectors, batch_first=True),
        'batch_txt_mask': nn.utils.rnn.pad_sequence(txt_masks, batch_first=True),
        'batch_map_gt': map_gt_padded,
        'batch_rcnn_input': nn.utils.rnn.pad_sequence(rcnn_feats, batch_first=True).float(),
        'batch_rcnn_mask': nn.utils.rnn.pad_sequence(rcnn_masks, batch_first=True),
        'batch_rcnn_bbox': nn.utils.rnn.pad_sequence(rcnn_bboxes, batch_first=True).float(),
        'batch_duration': [sample['duration'] for sample in batch],
        'batch_reg_gt': nn.utils.rnn.pad_sequence(reg_gts, batch_first=True).float(),
        'batch_description': [sample['description'] for sample in batch],
    }
def frcnn_collate_fn(batch):
    """Collate samples carrying both raw visual features and RCNN features.

    Combination of ``collate_fn`` (visual stream, list-valued ``reg_gt``)
    and ``orcnn_collate_fn`` (padded RCNN feature/mask/bbox sequences).
    """
    word_vectors = [sample['word_vectors'] for sample in batch]
    txt_masks = [sample['txt_mask'] for sample in batch]
    map_gts = [sample['map_gt'] for sample in batch]
    vis_feats = [sample['visual_input'] for sample in batch]
    rcnn_feats = [sample['rcnn_input'] for sample in batch]
    rcnn_masks = [sample['rcnn_mask'] for sample in batch]
    rcnn_bboxes = [sample['rcnn_bbox'] for sample in batch]

    # Zero-pad the square map ground-truths up to the batch maximum size.
    num_clips_max = max(gt.shape[-1] for gt in map_gts)
    map_gt_padded = torch.zeros(len(map_gts), 1, num_clips_max, num_clips_max)  # batchsize * 1 * 16 * 16
    for idx, gt in enumerate(map_gts):
        n = gt.shape[-1]
        map_gt_padded[idx][0, :n, :n] = gt

    return {
        'batch_anno_idxs': [sample['anno_idx'] for sample in batch],
        'batch_word_vectors': nn.utils.rnn.pad_sequence(word_vectors, batch_first=True),
        'batch_txt_mask': nn.utils.rnn.pad_sequence(txt_masks, batch_first=True),
        'batch_map_gt': map_gt_padded,
        'batch_vis_input': nn.utils.rnn.pad_sequence(vis_feats, batch_first=True).float(),
        'batch_rcnn_input': nn.utils.rnn.pad_sequence(rcnn_feats, batch_first=True).float(),
        'batch_rcnn_mask': nn.utils.rnn.pad_sequence(rcnn_masks, batch_first=True),
        'batch_rcnn_bbox': nn.utils.rnn.pad_sequence(rcnn_bboxes, batch_first=True).float(),
        'batch_duration': [sample['duration'] for sample in batch],
        'batch_reg_gt': [sample['reg_gt'] for sample in batch],
        'batch_description': [sample['description'] for sample in batch],
    }
def average_to_fixed_length(visual_input, num_sample_clips=None):
    """Temporally resample frame features to a fixed number of clips.

    The frame axis is divided into ``num_sample_clips`` uniform segments;
    each output clip is the mean of the frames in its segment, or a copy of
    a single frame when the segment is empty (more clips than frames).

    Args:
        visual_input: tensor of shape ``(num_frames, feat_dim)``.
        num_sample_clips: target clip count; defaults to
            ``config.DATASET.NUM_SAMPLE_CLIPS`` (backward compatible with
            the original zero-argument call sites).

    Returns:
        Tensor of shape ``(num_sample_clips, feat_dim)``.
    """
    if num_sample_clips is None:
        num_sample_clips = config.DATASET.NUM_SAMPLE_CLIPS  # 256
    num_clips = visual_input.shape[0]  # frame num
    # Segment boundaries in frame indices, clamped to the last valid frame.
    idxs = torch.arange(0, num_sample_clips + 1, 1.0) / num_sample_clips * num_clips
    idxs = torch.min(torch.round(idxs).long(), torch.tensor(num_clips - 1))
    new_visual_input = []
    for i in range(num_sample_clips):
        s_idx, e_idx = idxs[i].item(), idxs[i + 1].item()
        if s_idx < e_idx:
            new_visual_input.append(torch.mean(visual_input[s_idx:e_idx], dim=0))
        else:
            # Empty segment: fall back to copying the boundary frame.
            new_visual_input.append(visual_input[s_idx])
    return torch.stack(new_visual_input, dim=0)  # e.g. 256*4096
def sample_to_fixed_length(*args, num_sample_clips=None):
    """Subsample one or more aligned frame sequences to a fixed length.

    The frame axis is divided into ``num_sample_clips`` uniform segments;
    for each segment the middle element is taken from every sequence in
    ``args`` (all sequences are indexed identically, so they are assumed to
    share the same length along dim 0).

    Args:
        *args: one or more tensors of shape ``(num_frames, ...)``.
        num_sample_clips: target length (keyword-only); defaults to
            ``config.DATASET.NUM_SAMPLE_CLIPS`` (backward compatible with
            the original call sites).

    Returns:
        Tuple of tensors, one per input, each of length ``num_sample_clips``.
    """
    assert len(args) > 0
    if num_sample_clips is None:
        num_sample_clips = config.DATASET.NUM_SAMPLE_CLIPS  # 256
    num_clips = args[0].shape[0]  # frame num
    # Segment boundaries in frame indices, clamped to the last valid frame.
    idxs = torch.arange(0, num_sample_clips + 1, 1.0) / num_sample_clips * num_clips
    idxs = torch.min(torch.round(idxs).long(), torch.tensor(num_clips - 1))
    res_list = [[] for _ in range(len(args))]
    for i in range(num_sample_clips):
        s_idx, e_idx = idxs[i].item(), idxs[i + 1].item()
        # Middle index of the segment; degenerates to s_idx for empty segments.
        pick = s_idx + (e_idx - s_idx) // 2 if s_idx < e_idx else s_idx
        for j, seq in enumerate(args):
            res_list[j].append(seq[pick])
    return tuple(torch.stack(col, dim=0) for col in res_list)
| 45.64539
| 116
| 0.680236
| 1,052
| 6,436
| 3.810837
| 0.0827
| 0.052382
| 0.043402
| 0.079571
| 0.897481
| 0.888501
| 0.882015
| 0.882015
| 0.869544
| 0.869544
| 0
| 0.011599
| 0.19624
| 6,436
| 141
| 117
| 45.64539
| 0.763387
| 0.016781
| 0
| 0.77686
| 0
| 0
| 0.108386
| 0
| 0
| 0
| 0
| 0
| 0.008264
| 1
| 0.041322
| false
| 0
| 0.024793
| 0
| 0.107438
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4d81ec71b4f3fa44bee43cee40a44e66995ee5ca
| 5,835
|
py
|
Python
|
menpofit/lucaskanade/appearance/alternating.py
|
bakalogatos/menpofit
|
ca83437d00c09b175f21e9a988378d8f3dca8a9f
|
[
"BSD-3-Clause"
] | null | null | null |
menpofit/lucaskanade/appearance/alternating.py
|
bakalogatos/menpofit
|
ca83437d00c09b175f21e9a988378d8f3dca8a9f
|
[
"BSD-3-Clause"
] | null | null | null |
menpofit/lucaskanade/appearance/alternating.py
|
bakalogatos/menpofit
|
ca83437d00c09b175f21e9a988378d8f3dca8a9f
|
[
"BSD-3-Clause"
] | null | null | null |
from scipy.linalg import norm
import numpy as np
from .base import AppearanceLucasKanade
class AlternatingForwardAdditive(AppearanceLucasKanade):
    @property
    def algorithm(self):
        return 'Alternating-FA'

    def _fit(self, fitting_result, max_iters=20):
        """Run the alternating forward-additive optimisation loop.

        Each iteration alternates between (1) re-estimating the appearance
        template from the currently warped image and (2) a Gauss-Newton
        update of the warp parameters.
        """
        image = fitting_result.image
        fitting_result.weights = [[0]]
        step_norm = self.eps + 1  # seed above eps so the loop runs at least once
        iteration = 0
        # Forward Additive Algorithm
        while iteration < max_iters and step_norm > self.eps:
            # Warp the image into the template frame with the current transform
            IWxp = image.warp_to_mask(self.template.mask, self.transform,
                                      warp_landmarks=False)
            # Alternation step 1: refresh the appearance (template) estimate
            weights = self.appearance_model.project(IWxp)
            self.template = self.appearance_model.instance(weights)
            fitting_result.weights.append(weights)
            # Alternation step 2: Gauss-Newton step on the warp parameters.
            # The warp Jacobian must be recomputed every iteration in the
            # forward-additive formulation.
            dW_dp = self.transform.d_dp(self.template.mask.true_indices())
            self._J = self.residual.steepest_descent_images(
                image, dW_dp, forward=(self.template, self.transform))
            self._H = self.residual.calculate_hessian(self._J)
            sd_delta_p = self.residual.steepest_descent_update(
                self._J, self.template, IWxp)
            delta_p = np.real(self._calculate_delta_p(sd_delta_p))
            # Additive update of the warp parameters
            parameters = self.transform.as_vector() + delta_p
            self.transform.from_vector_inplace(parameters)
            fitting_result.parameters.append(parameters)
            # Converged once the parameter step becomes tiny
            step_norm = np.abs(norm(delta_p))
            iteration += 1
        return fitting_result
class AlternatingForwardCompositional(AppearanceLucasKanade):
    @property
    def algorithm(self):
        return 'Alternating-FC'

    def _set_up(self):
        # The warp Jacobian is constant in compositional formulations,
        # so it is precomputed once here instead of per iteration.
        self._dW_dp = self.transform.d_dp(self.template.mask.true_indices())

    def _fit(self, fitting_result, max_iters=20):
        """Run the alternating forward-compositional optimisation loop."""
        image = fitting_result.image
        fitting_result.weights = [[0]]
        step_norm = self.eps + 1  # seed above eps so the loop runs at least once
        iteration = 0
        # Forward Compositional Algorithm
        while iteration < max_iters and step_norm > self.eps:
            # Warp the image into the template frame with the current transform
            IWxp = image.warp_to_mask(self.template.mask, self.transform,
                                      warp_landmarks=False)
            # Alternation step 1: refresh the appearance (template) estimate
            weights = self.appearance_model.project(IWxp)
            self.template = self.appearance_model.instance(weights)
            fitting_result.weights.append(weights)
            # Alternation step 2: Gauss-Newton step on the warp, using the
            # precomputed Jacobian and the warped image's gradients
            self._J = self.residual.steepest_descent_images(IWxp, self._dW_dp)
            self._H = self.residual.calculate_hessian(self._J)
            sd_delta_p = self.residual.steepest_descent_update(
                self._J, self.template, IWxp)
            delta_p = np.real(self._calculate_delta_p(sd_delta_p))
            # Compositional update of the warp
            self.transform.compose_after_from_vector_inplace(delta_p)
            fitting_result.parameters.append(self.transform.as_vector())
            # Converged once the parameter step becomes tiny
            step_norm = np.abs(norm(delta_p))
            iteration += 1
        return fitting_result
class AlternatingInverseCompositional(AppearanceLucasKanade):
    @property
    def algorithm(self):
        return 'Alternating-IC'

    def _set_up(self):
        # The warp Jacobian is constant in compositional formulations,
        # so it is precomputed once here instead of per iteration.
        self._dW_dp = self.transform.d_dp(self.template.mask.true_indices())

    def _fit(self, fitting_result, max_iters=20):
        """Run the alternating inverse-compositional optimisation loop."""
        image = fitting_result.image
        fitting_result.weights = [[0]]
        step_norm = self.eps + 1  # seed above eps so the loop runs at least once
        iteration = 0
        # Baker-Matthews, Inverse Compositional Algorithm
        while iteration < max_iters and step_norm > self.eps:
            # Warp the image into the template frame with the current transform
            IWxp = image.warp_to_mask(self.template.mask, self.transform,
                                      warp_landmarks=False)
            # Alternation step 1: refresh the appearance (template) estimate
            weights = self.appearance_model.project(IWxp)
            self.template = self.appearance_model.instance(weights)
            fitting_result.weights.append(weights)
            # Alternation step 2: Gauss-Newton step computed on the template
            # (the "inverse" roles: template gradients, IWxp as the target)
            self._J = self.residual.steepest_descent_images(self.template,
                                                            self._dW_dp)
            self._H = self.residual.calculate_hessian(self._J)
            sd_delta_p = self.residual.steepest_descent_update(
                self._J, IWxp, self.template)
            delta_p = np.real(self._calculate_delta_p(sd_delta_p))
            # The inverse step is inverted before being composed into the warp
            inv_delta_p = self.transform.pseudoinverse_vector(delta_p)
            self.transform.compose_after_from_vector_inplace(inv_delta_p)
            fitting_result.parameters.append(self.transform.as_vector())
            # Converged once the parameter step becomes tiny
            step_norm = np.abs(norm(delta_p))
            iteration += 1
        return fitting_result
| 35.150602
| 78
| 0.624165
| 647
| 5,835
| 5.394127
| 0.159196
| 0.034384
| 0.02063
| 0.046418
| 0.851289
| 0.851289
| 0.851289
| 0.797994
| 0.797994
| 0.78596
| 0
| 0.004431
| 0.303856
| 5,835
| 165
| 79
| 35.363636
| 0.854751
| 0.180463
| 0
| 0.747126
| 0
| 0
| 0.008848
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.091954
| false
| 0
| 0.034483
| 0.034483
| 0.229885
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4dba89924dda6a04ce1ee9dda081d71e75585574
| 33,888
|
py
|
Python
|
cinder/tests/unit/volume/drivers/open_e/test_rest.py
|
cloudification-io/cinder
|
23d76e01f2b4f3771b57fb287084a4884238b827
|
[
"Apache-2.0"
] | null | null | null |
cinder/tests/unit/volume/drivers/open_e/test_rest.py
|
cloudification-io/cinder
|
23d76e01f2b4f3771b57fb287084a4884238b827
|
[
"Apache-2.0"
] | 1
|
2020-12-22T20:40:20.000Z
|
2020-12-23T18:34:42.000Z
|
cinder/tests/unit/volume/drivers/open_e/test_rest.py
|
cloudification-io/cinder
|
23d76e01f2b4f3771b57fb287084a4884238b827
|
[
"Apache-2.0"
] | 1
|
2019-06-24T20:21:33.000Z
|
2019-06-24T20:21:33.000Z
|
# Copyright (c) 2020 Open-E, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from oslo_utils import units as o_units
from cinder import context
from cinder import exception
from cinder.tests.unit import test
from cinder.volume.drivers.open_e.jovian_common import exception as jexc
from cinder.volume.drivers.open_e.jovian_common import jdss_common as jcom
from cinder.volume.drivers.open_e.jovian_common import rest
# Fixed UUIDs used as volume/target identifiers throughout the tests below.
UUID_1 = '12345678-1234-1234-1234-000000000001'
UUID_2 = '12345678-1234-1234-1234-000000000002'
# Baseline driver configuration fixture consumed via fake_safe_get / the
# mocked config object in get_rest(); keys mirror the options the Open-E
# JovianDSS driver reads from cinder.conf.
CONFIG_OK = {
    'san_hosts': ['192.168.0.2'],
    'san_api_port': 82,
    'driver_use_ssl': 'https',
    'jovian_rest_send_repeats': 3,
    'jovian_recovery_delay': 60,
    'san_login': 'admin',
    'san_password': 'password',
    'jovian_ignore_tpath': [],
    'target_port': 3260,
    'jovian_pool': 'Pool-0',
    'iscsi_target_prefix': 'iqn.2020-04.com.open-e.cinder:',
    'chap_password_len': 12,
    'san_thin_provision': False,
    'jovian_block_size': '128K'
}
def fake_safe_get(value):
    """Stand-in for oslo.config's ``safe_get``: resolve *value* in CONFIG_OK."""
    setting = CONFIG_OK[value]
    return setting
class TestOpenEJovianRESTAPI(test.TestCase):
def get_rest(self, config):
ctx = context.get_admin_context()
cfg = mock.Mock()
cfg.append_config_values.return_value = None
cfg.safe_get = lambda val: config[val]
cfg.get = lambda val, default: config[val]
jdssr = rest.JovianRESTAPI(cfg)
jdssr.rproxy = mock.Mock()
return jdssr, ctx
def start_patches(self, patches):
for p in patches:
p.start()
def stop_patches(self, patches):
for p in patches:
p.stop()
def test_get_active_host(self):
jrest, ctx = self.get_rest(CONFIG_OK)
jrest.rproxy.get_active_host.return_value = "test_data"
ret = jrest.get_active_host()
self.assertEqual("test_data", ret)
def test_is_pool_exists(self):
jrest, ctx = self.get_rest(CONFIG_OK)
resp = {'code': 200,
'error': None}
jrest.rproxy.pool_request.return_value = resp
self.assertTrue(jrest.is_pool_exists())
err = {'errorid': 12}
resp = {'code': 404,
'error': err}
jrest.rproxy.pool_request.return_value = resp
self.assertFalse(jrest.is_pool_exists())
pool_request_expected = [
mock.call('GET', ''),
mock.call('GET', '')]
jrest.rproxy.pool_request.assert_has_calls(pool_request_expected)
def get_iface_info(self):
jrest, ctx = self.get_rest(CONFIG_OK)
resp = {
'code': 200,
'error': None}
jrest.rproxy.pool_request.return_value = resp
self.assertTrue(jrest.is_pool_exists())
def test_get_luns(self):
jrest, ctx = self.get_rest(CONFIG_OK)
resp = {'data': [{
'vscan': None,
'full_name': 'pool-0/' + UUID_1,
'userrefs': None,
'primarycache': 'all',
'logbias': 'latency',
'creation': '1591543140',
'sync': 'always',
'is_clone': False,
'dedup': 'off',
'sharenfs': None,
'receive_resume_token': None,
'volsize': '1073741824'}],
'error': None,
'code': 200}
jrest.rproxy.pool_request.return_value = resp
self.assertEqual(resp['data'], jrest.get_luns())
err = {'errorid': 12, 'message': 'test failure'}
resp = {'code': 404,
'data': None,
'error': err}
jrest.rproxy.pool_request.return_value = resp
self.assertRaises(jexc.JDSSRESTException, jrest.get_luns)
get_luns_expected = [
mock.call('GET', "/volumes"),
mock.call('GET', "/volumes")]
jrest.rproxy.pool_request.assert_has_calls(get_luns_expected)
def test_create_lun(self):
jrest, ctx = self.get_rest(CONFIG_OK)
resp = {'data': {
'vscan': None,
'full_name': 'pool-0/' + jcom.vname(UUID_1),
'userrefs': None,
'primarycache': 'all',
'logbias': 'latency',
'creation': '1591543140',
'sync': 'always',
'is_clone': False,
'dedup': 'off',
'sharenfs': None,
'receive_resume_token': None,
'volsize': '1073741824'},
'error': None,
'code': 200}
jbody = {
'name': jcom.vname(UUID_1),
'size': "1073741824",
'sparse': False
}
jbody_sparse = {
'name': jcom.vname(UUID_1),
'size': "1073741824",
'sparse': True
}
jrest.rproxy.pool_request.return_value = resp
self.assertIsNone(jrest.create_lun(jcom.vname(UUID_1), o_units.Gi))
err = {'errno': '5', 'message': 'test failure'}
resp = {'code': 404,
'data': None,
'error': err}
jrest.rproxy.pool_request.return_value = resp
self.assertRaises(jexc.JDSSRESTException,
jrest.create_lun,
jcom.vname(UUID_1),
o_units.Gi,
sparse=True)
addr = "/volumes"
create_lun_expected = [
mock.call('POST', addr, json_data=jbody),
mock.call('POST', addr, json_data=jbody_sparse)]
jrest.rproxy.pool_request.assert_has_calls(create_lun_expected)
def test_extend_lun(self):
jrest, ctx = self.get_rest(CONFIG_OK)
resp = {'data': None,
'error': None,
'code': 201}
jbody = {
'size': "2147483648",
}
jrest.rproxy.pool_request.return_value = resp
self.assertIsNone(jrest.extend_lun(jcom.vname(UUID_1), 2 * o_units.Gi))
err = {'message': 'test failure'}
resp = {'code': 500,
'data': None,
'error': err}
jrest.rproxy.pool_request.return_value = resp
self.assertRaises(jexc.JDSSRESTException,
jrest.extend_lun,
jcom.vname(UUID_1),
2 * o_units.Gi)
addr = "/volumes/" + jcom.vname(UUID_1)
create_lun_expected = [
mock.call('PUT', addr, json_data=jbody),
mock.call('PUT', addr, json_data=jbody)]
jrest.rproxy.pool_request.assert_has_calls(create_lun_expected)
def test_is_lun(self):
jrest, ctx = self.get_rest(CONFIG_OK)
resp = {'data': {
"vscan": None,
"full_name": "pool-0/" + jcom.vname(UUID_1),
"userrefs": None,
"primarycache": "all",
"logbias": "latency",
"creation": "1591543140",
"sync": "always",
"is_clone": False,
"dedup": "off",
"sharenfs": None,
"receive_resume_token": None,
"volsize": "1073741824"},
'error': None,
'code': 200}
jrest.rproxy.pool_request.return_value = resp
self.assertTrue(jrest.is_lun(jcom.vname(UUID_1)))
err = {'errno': 1,
'message': ('Zfs resource: Pool-0/' + jcom.vname(UUID_1) +
' not found in this collection.')}
resp = {'code': 500,
'data': None,
'error': err}
jrest.rproxy.pool_request.return_value = resp
self.assertEqual(False, jrest.is_lun(jcom.vname(UUID_1)))
jrest.rproxy.pool_request.side_effect = (
jexc.JDSSRESTProxyException(host='test_host', reason='test'))
self.assertRaises(jexc.JDSSRESTProxyException,
jrest.is_lun,
'v_' + UUID_1)
def test_get_lun(self):
jrest, ctx = self.get_rest(CONFIG_OK)
resp = {'data': {"vscan": None,
"full_name": "pool-0/v_" + UUID_1,
"userrefs": None,
"primarycache": "all",
"logbias": "latency",
"creation": "1591543140",
"sync": "always",
"is_clone": False,
"dedup": "off",
"sharenfs": None,
"receive_resume_token": None,
"volsize": "1073741824"},
'error': None,
'code': 200}
jrest.rproxy.pool_request.return_value = resp
self.assertEqual(resp['data'], jrest.get_lun('v_' + UUID_1))
err = {'errno': 1,
'message': ('Zfs resource: Pool-0/v_' + UUID_1 +
' not found in this collection.')}
resp = {'code': 500,
'data': None,
'error': err}
jrest.rproxy.pool_request.return_value = resp
self.assertRaises(jexc.JDSSResourceNotFoundException,
jrest.get_lun,
'v_' + UUID_1)
jrest.rproxy.pool_request.return_value = resp
self.assertRaises(jexc.JDSSResourceNotFoundException,
jrest.get_lun,
'v_' + UUID_1)
err = {'errno': 10,
'message': ('Test error')}
resp = {'code': 500,
'data': None,
'error': err}
jrest.rproxy.pool_request.return_value = resp
self.assertRaises(jexc.JDSSException, jrest.get_lun, 'v_' + UUID_1)
def test_modify_lun(self):
jrest, ctx = self.get_rest(CONFIG_OK)
resp = {'data': None,
'error': None,
'code': 201}
req = {'name': 'v_' + UUID_2}
jrest.rproxy.pool_request.return_value = resp
self.assertIsNone(jrest.modify_lun('v_' + UUID_1, prop=req))
err = {'errno': 1,
'message': ('Zfs resource: Pool-0/v_' + UUID_1 +
' not found in this collection.')}
resp = {'code': 500,
'data': None,
'error': err}
jrest.rproxy.pool_request.return_value = resp
self.assertRaises(jexc.JDSSResourceNotFoundException,
jrest.modify_lun,
'v_' + UUID_1,
prop=req)
err = {'errno': 10,
'message': ('Test error')}
resp = {'code': 500,
'data': None,
'error': err}
jrest.rproxy.pool_request.return_value = resp
self.assertRaises(jexc.JDSSException,
jrest.modify_lun,
'v_' + UUID_1,
prop=req)
addr = "/volumes/v_" + UUID_1
modify_lun_expected = [
mock.call('PUT', addr, json_data=req),
mock.call('PUT', addr, json_data=req),
mock.call('PUT', addr, json_data=req)]
jrest.rproxy.pool_request.assert_has_calls(modify_lun_expected)
def test_make_readonly_lun(self):
jrest, ctx = self.get_rest(CONFIG_OK)
resp = {'data': None,
'error': None,
'code': 201}
req = {'property_name': 'readonly', 'property_value': 'on'}
jrest.rproxy.pool_request.return_value = resp
self.assertIsNone(jrest.modify_lun('v_' + UUID_1, prop=req))
addr = "/volumes/v_" + UUID_1
modify_lun_expected = [mock.call('PUT', addr, json_data=req)]
jrest.rproxy.pool_request.assert_has_calls(modify_lun_expected)
def test_delete_lun(self):
jrest, ctx = self.get_rest(CONFIG_OK)
# Delete OK
resp = {'data': None,
'error': None,
'code': 204}
jrest.rproxy.pool_request.return_value = resp
self.assertIsNone(jrest.delete_lun('v_' + UUID_1))
addr = "/volumes/v_" + UUID_1
delete_lun_expected = [mock.call('DELETE', addr)]
jrest.rproxy.pool_request.assert_has_calls(delete_lun_expected)
# No volume to delete
err = {'errno': 1,
'message': ('Zfs resource: Pool-0/v_' + UUID_1 +
' not found in this collection.')}
resp = {'code': 500,
'data': None,
'error': err}
jrest.rproxy.pool_request.return_value = resp
self.assertIsNone(jrest.delete_lun('v_' + UUID_1))
delete_lun_expected += [mock.call('DELETE', addr)]
jrest.rproxy.pool_request.assert_has_calls(delete_lun_expected)
# Volume has snapshots
msg = ("cannot destroy 'Pool-0/{vol}': volume has children\nuse '-r'"
" to destroy the following datasets:\nPool-0/{vol}@s1")
msg = msg.format(vol='v_' + UUID_1)
url = "http://192.168.0.2:82/api/v3/pools/Pool-0/volumes/" + UUID_1
err = {"class": "zfslib.wrap.zfs.ZfsCmdError",
"errno": 1000,
"message": msg,
"url": url}
resp = {
'code': 500,
'data': None,
'error': err}
delete_lun_expected += [mock.call('DELETE', addr)]
jrest.rproxy.pool_request.return_value = resp
self.assertRaises(
exception.VolumeIsBusy,
jrest.delete_lun,
'v_' + UUID_1)
jrest.rproxy.pool_request.assert_has_calls(delete_lun_expected)
def test_delete_lun_args(self):
jrest, ctx = self.get_rest(CONFIG_OK)
addr = "/volumes/v_" + UUID_1
# Delete OK
resp = {'data': None,
'error': None,
'code': 204}
req = {'recursively_children': True,
'recursively_dependents': True,
'force_umount': True}
delete_lun_expected = [mock.call('DELETE', addr, json_data=req)]
jrest.rproxy.pool_request.return_value = resp
self.assertIsNone(
jrest.delete_lun('v_' + UUID_1,
recursively_children=True,
recursively_dependents=True,
force_umount=True))
jrest.rproxy.pool_request.assert_has_calls(delete_lun_expected)
def test_is_target(self):
jrest, ctx = self.get_rest(CONFIG_OK)
tname = CONFIG_OK['iscsi_target_prefix'] + UUID_1
addr = '/san/iscsi/targets/{}'.format(tname)
data = {'incoming_users_active': True,
'name': tname,
'allow_ip': [],
'outgoing_user': None,
'active': True,
'conflicted': False,
'deny_ip': []}
resp = {'data': data,
'error': None,
'code': 200}
is_target_expected = [mock.call('GET', addr)]
jrest.rproxy.pool_request.return_value = resp
self.assertTrue(jrest.is_target(tname))
msg = "Target {} not exists.".format(tname)
url = ("http://{addr}:{port}/api/v3/pools/Pool-0/"
"san/iscsi/targets/{target}")
url = url.format(addr=CONFIG_OK['san_hosts'][0],
port=CONFIG_OK['san_api_port'],
target=tname)
err = {"class": "opene.exceptions.ItemNotFoundError",
"message": msg,
"url": url}
resp = {'data': None,
'error': err,
'code': 404}
is_target_expected += [mock.call('GET', addr)]
jrest.rproxy.pool_request.return_value = resp
self.assertEqual(False, jrest.is_target(tname))
jrest.rproxy.pool_request.assert_has_calls(is_target_expected)
def test_create_target(self):
jrest, ctx = self.get_rest(CONFIG_OK)
# Create OK
tname = CONFIG_OK['iscsi_target_prefix'] + UUID_1
addr = '/san/iscsi/targets'
data = {'incoming_users_active': True,
'name': tname,
'allow_ip': [],
'outgoing_user': None,
'active': True,
'conflicted': False,
'deny_ip': []}
resp = {'data': data,
'error': None,
'code': 201}
req = {'name': tname,
'active': True,
'incoming_users_active': True}
jrest.rproxy.pool_request.return_value = resp
create_target_expected = [mock.call('POST', addr, json_data=req)]
self.assertIsNone(jrest.create_target(tname))
# Target exists
tname = CONFIG_OK['iscsi_target_prefix'] + UUID_1
addr = '/san/iscsi/targets'
data = {'incoming_users_active': True,
'name': tname,
'allow_ip': [],
'outgoing_user': None,
'active': True,
'conflicted': False,
'deny_ip': []}
resp = {'data': data,
'error': None,
'code': 201}
url = ("http://{addr}:{port}/api/v3/pools/Pool-0/"
"san/iscsi/targets")
url = url.format(addr=CONFIG_OK['san_hosts'][0],
port=CONFIG_OK['san_api_port'])
msg = "Target with name {} is already present on Pool-0.".format(tname)
err = {"class": "opene.san.target.base.iscsi.TargetNameConflictError",
"message": msg,
"url": url}
resp = {'data': None,
'error': err,
'code': 409}
jrest.rproxy.pool_request.return_value = resp
create_target_expected += [mock.call('POST', addr, json_data=req)]
self.assertRaises(jexc.JDSSResourceExistsException,
jrest.create_target, tname)
# Unknown error
tname = CONFIG_OK['iscsi_target_prefix'] + UUID_1
addr = "/san/iscsi/targets"
resp = {'data': data,
'error': None,
'code': 500}
url = ("http://{addr}:{port}/api/v3/pools/Pool-0/"
"san/iscsi/targets")
url = url.format(addr=CONFIG_OK['san_hosts'][0],
port=CONFIG_OK['san_api_port'])
msg = "Target with name {} faced some fatal failure.".format(tname)
err = {"class": "some test error",
"message": msg,
"url": url,
"errno": 123}
resp = {'data': None,
'error': err,
'code': 500}
jrest.rproxy.pool_request.return_value = resp
create_target_expected += [mock.call('POST', addr, json_data=req)]
self.assertRaises(jexc.JDSSException,
jrest.create_target, tname)
jrest.rproxy.pool_request.assert_has_calls(create_target_expected)
def test_delete_target(self):
jrest, ctx = self.get_rest(CONFIG_OK)
# Delete OK
tname = CONFIG_OK['iscsi_target_prefix'] + UUID_1
addr = '/san/iscsi/targets/{}'.format(tname)
resp = {'data': None,
'error': None,
'code': 204}
jrest.rproxy.pool_request.return_value = resp
delete_target_expected = [mock.call('DELETE', addr)]
self.assertIsNone(jrest.delete_target(tname))
# Delete no such target
url = ("http://{addr}:{port}/api/v3/pools/Pool-0/"
"san/iscsi/targets")
url = url.format(addr=CONFIG_OK['san_hosts'][0],
port=CONFIG_OK['san_api_port'])
err = {"class": "opene.exceptions.ItemNotFoundError",
"message": "Target {} not exists.".format(tname),
"url": url}
resp = {'data': None,
'error': err,
'code': 404}
jrest.rproxy.pool_request.return_value = resp
delete_target_expected += [mock.call('DELETE', addr)]
self.assertRaises(jexc.JDSSResourceNotFoundException,
jrest.delete_target, tname)
# Delete unknown error
err = {"class": "some test error",
"message": "test error message",
"url": url,
"errno": 123}
resp = {'data': None,
'error': err,
'code': 500}
jrest.rproxy.pool_request.return_value = resp
delete_target_expected += [mock.call('DELETE', addr)]
self.assertRaises(jexc.JDSSException,
jrest.delete_target, tname)
jrest.rproxy.pool_request.assert_has_calls(delete_target_expected)
def test_create_target_user(self):
jrest, ctx = self.get_rest(CONFIG_OK)
# Modify OK
tname = CONFIG_OK['iscsi_target_prefix'] + UUID_1
addr = '/san/iscsi/targets/{}/incoming-users'.format(tname)
chap_cred = {"name": "chapuser",
"password": "123456789012"}
resp = {'data': None,
'error': None,
'code': 201}
jrest.rproxy.pool_request.return_value = resp
expected = [mock.call('POST', addr, json_data=chap_cred)]
self.assertIsNone(jrest.create_target_user(tname, chap_cred))
# No such target
url = ("http://{addr}:{port}/api/v3/pools/Pool-0/"
"san/iscsi/targets")
url = url.format(addr=CONFIG_OK['san_hosts'][0],
port=CONFIG_OK['san_api_port'])
err = {"class": "opene.exceptions.ItemNotFoundError",
"message": "Target {} not exists.".format(tname),
"url": url}
resp = {'data': None,
'error': err,
'code': 404}
jrest.rproxy.pool_request.return_value = resp
expected += [mock.call('POST', addr, json_data=chap_cred)]
self.assertRaises(jexc.JDSSResourceNotFoundException,
jrest.create_target_user, tname, chap_cred)
# Unknown error
err = {"class": "some test error",
"message": "test error message",
"url": url,
"errno": 123}
resp = {'data': None,
'error': err,
'code': 500}
jrest.rproxy.pool_request.return_value = resp
expected += [mock.call('POST', addr, json_data=chap_cred)]
self.assertRaises(jexc.JDSSException,
jrest.create_target_user, tname, chap_cred)
jrest.rproxy.pool_request.assert_has_calls(expected)
def test_get_target_user(self):
jrest, ctx = self.get_rest(CONFIG_OK)
# Get OK
tname = CONFIG_OK['iscsi_target_prefix'] + UUID_1
addr = '/san/iscsi/targets/{}/incoming-users'.format(tname)
chap_users = {"name": "chapuser"}
resp = {'data': chap_users,
'error': None,
'code': 200}
jrest.rproxy.pool_request.return_value = resp
get_target_user_expected = [mock.call('GET', addr)]
self.assertEqual(chap_users, jrest.get_target_user(tname))
# No such target
url = ("http://{addr}:{port}/api/v3/pools/Pool-0/"
"san/iscsi/targets")
url = url.format(addr=CONFIG_OK['san_hosts'][0],
port=CONFIG_OK['san_api_port'])
err = {"class": "opene.exceptions.ItemNotFoundError",
"message": "Target {} not exists.".format(tname),
"url": url}
resp = {'data': None,
'error': err,
'code': 404}
jrest.rproxy.pool_request.return_value = resp
get_target_user_expected += [mock.call('GET', addr)]
self.assertRaises(jexc.JDSSResourceNotFoundException,
jrest.get_target_user, tname)
# Unknown error
err = {"class": "some test error",
"message": "test error message",
"url": url,
"errno": 123}
resp = {'data': None,
'error': err,
'code': 500}
jrest.rproxy.pool_request.return_value = resp
get_target_user_expected += [mock.call('GET', addr)]
self.assertRaises(jexc.JDSSException,
jrest.get_target_user, tname)
jrest.rproxy.pool_request.assert_has_calls(get_target_user_expected)
def test_delete_target_user(self):
jrest, ctx = self.get_rest(CONFIG_OK)
# Delete OK
tname = CONFIG_OK['iscsi_target_prefix'] + UUID_1
user = "chapuser"
addr = '/san/iscsi/targets/{}/incoming-users/chapuser'.format(tname)
resp = {'data': None,
'error': None,
'code': 204}
jrest.rproxy.pool_request.return_value = resp
delete_target_user_expected = [mock.call('DELETE', addr)]
self.assertIsNone(jrest.delete_target_user(tname, user))
# No such user
url = ("http://{addr}:{port}/api/v3/pools/Pool-0/"
"san/iscsi/targets/{tname}/incoming-user/{chapuser}")
url = url.format(addr=CONFIG_OK['san_hosts'][0],
port=CONFIG_OK['san_api_port'],
tname=tname,
chapuser=user)
err = {"class": "opene.exceptions.ItemNotFoundError",
"message": "User {} not exists.".format(user),
"url": url}
resp = {'data': None,
'error': err,
'code': 404}
jrest.rproxy.pool_request.return_value = resp
delete_target_user_expected += [mock.call('DELETE', addr)]
self.assertRaises(jexc.JDSSResourceNotFoundException,
jrest.delete_target_user, tname, user)
# Unknown error
err = {"class": "some test error",
"message": "test error message",
"url": url,
"errno": 123}
resp = {'data': None,
'error': err,
'code': 500}
jrest.rproxy.pool_request.return_value = resp
delete_target_user_expected += [mock.call('DELETE', addr)]
self.assertRaises(jexc.JDSSException,
jrest.delete_target_user, tname, user)
jrest.rproxy.pool_request.assert_has_calls(delete_target_user_expected)
def test_is_target_lun(self):
jrest, ctx = self.get_rest(CONFIG_OK)
# lun present
tname = CONFIG_OK['iscsi_target_prefix'] + UUID_1
vname = jcom.vname(UUID_1)
addr = '/san/iscsi/targets/{target}/luns/{lun}'.format(
target=tname, lun=vname)
data = {
"block_size": 512,
"device_handler": "vdisk_fileio",
"lun": 0,
"mode": "wt",
"name": vname,
"prod_id": "Storage",
"scsi_id": "99e2c883331edf87"}
resp = {'data': data,
'error': None,
'code': 200}
jrest.rproxy.pool_request.return_value = resp
is_target_lun_expected = [mock.call('GET', addr)]
self.assertTrue(jrest.is_target_lun(tname, vname))
url = "http://{ip}:{port}/api/v3/pools/Pool-0{addr}"
url = url.format(ip=CONFIG_OK['san_hosts'][0],
port=CONFIG_OK['san_api_port'],
tname=tname,
addr=addr)
msg = "volume name {lun} is not attached to target {target}"
msg = msg.format(lun=vname, target=tname)
err = {"class": "opene.exceptions.ItemNotFoundError",
"message": msg,
"url": url}
resp = {'data': None,
'error': err,
'code': 404}
jrest.rproxy.pool_request.return_value = resp
is_target_lun_expected += [mock.call('GET', addr)]
self.assertEqual(False, jrest.is_target_lun(tname, vname))
err = {"class": "some test error",
"message": "test error message",
"url": url,
"errno": 123}
resp = {'data': None,
'error': err,
'code': 500}
jrest.rproxy.pool_request.return_value = resp
is_target_lun_expected += [mock.call('GET', addr)]
self.assertRaises(jexc.JDSSException,
jrest.is_target_lun, tname, vname)
jrest.rproxy.pool_request.assert_has_calls(is_target_lun_expected)
def test_attach_target_vol(self):
    """attach_target_vol maps backend status codes to driver exceptions.

    201 -> None (success), 409 -> JDSSResourceExistsException,
    404 -> JDSSResourceNotFoundException, anything else -> JDSSException.
    """
    jrest, ctx = self.get_rest(CONFIG_OK)
    # attach ok
    tname = CONFIG_OK['iscsi_target_prefix'] + UUID_1
    vname = jcom.vname(UUID_1)
    addr = '/san/iscsi/targets/{}/luns'.format(tname)
    jbody = {"name": vname, "lun": 0}
    data = {"block_size": 512,
            "device_handler": "vdisk_fileio",
            "lun": 0,
            "mode": "wt",
            "name": vname,
            "prod_id": "Storage",
            "scsi_id": "99e2c883331edf87"}
    resp = {'data': data,
            'error': None,
            'code': 201}
    jrest.rproxy.pool_request.return_value = resp
    attach_target_vol_expected = [
        mock.call('POST', addr, json_data=jbody)]
    self.assertIsNone(jrest.attach_target_vol(tname, vname))
    # lun attached already
    # NOTE(review): the original re-assigned an identical 'url' before every
    # case; it is computed once here since the value never changes.
    url = 'http://85.14.118.246:11582/api/v3/pools/Pool-0/{}'.format(addr)
    msg = 'Volume /dev/Pool-0/{} is already used.'.format(vname)
    err = {"class": "opene.exceptions.ItemConflictError",
           "message": msg,
           "url": url}
    resp = {'data': None,
            'error': err,
            'code': 409}
    jrest.rproxy.pool_request.return_value = resp
    attach_target_vol_expected += [
        mock.call('POST', addr, json_data=jbody)]
    self.assertRaises(jexc.JDSSResourceExistsException,
                      jrest.attach_target_vol, tname, vname)
    # no such target
    msg = 'Target {} not exists.'.format(vname)
    err = {"class": "opene.exceptions.ItemNotFoundError",
           "message": msg,
           "url": url}
    resp = {'data': None,
            'error': err,
            'code': 404}
    jrest.rproxy.pool_request.return_value = resp
    attach_target_vol_expected += [
        mock.call('POST', addr, json_data=jbody)]
    self.assertRaises(jexc.JDSSResourceNotFoundException,
                      jrest.attach_target_vol, tname, vname)
    # error unknown (the dead 'msg' reassignment from the original was
    # dropped; this payload carries its own message)
    err = {"class": "some test error",
           "message": "test error message",
           "url": url,
           "errno": 123}
    resp = {'data': None,
            'error': err,
            'code': 500}
    jrest.rproxy.pool_request.return_value = resp
    attach_target_vol_expected += [
        mock.call('POST', addr, json_data=jbody)]
    self.assertRaises(jexc.JDSSException,
                      jrest.attach_target_vol, tname, vname)
    jrest.rproxy.pool_request.assert_has_calls(attach_target_vol_expected)
def test_detach_target_vol(self):
    """detach_target_vol: 204 -> None, 404 -> NotFound, else JDSSException."""
    jrest, ctx = self.get_rest(CONFIG_OK)
    # detach target vol ok
    tname = CONFIG_OK['iscsi_target_prefix'] + UUID_1
    vname = jcom.vname(UUID_1)
    addr = '/san/iscsi/targets/{tar}/luns/{vol}'.format(
        tar=tname, vol=vname)
    resp = {'data': None,
            'error': None,
            'code': 204}
    jrest.rproxy.pool_request.return_value = resp
    detach_target_vol_expected = [
        mock.call('DELETE', addr)]
    self.assertIsNone(jrest.detach_target_vol(tname, vname))
    # no such target
    # NOTE(review): 'url' was re-assigned identically per case in the
    # original; computed once here since the value never changes.
    url = 'http://85.14.118.246:11582/api/v3/pools/Pool-0/{}'.format(addr)
    msg = 'Target {} not exists.'.format(vname)
    err = {"class": "opene.exceptions.ItemNotFoundError",
           "message": msg,
           "url": url}
    resp = {'data': None,
            'error': err,
            'code': 404}
    jrest.rproxy.pool_request.return_value = resp
    detach_target_vol_expected += [
        mock.call('DELETE', addr)]
    self.assertRaises(jexc.JDSSResourceNotFoundException,
                      jrest.detach_target_vol, tname, vname)
    # error unknown (the dead 'msg' reassignment from the original was
    # dropped; this payload carries its own message)
    err = {"class": "some test error",
           "message": "test error message",
           "url": url,
           "errno": 125}
    resp = {'data': None,
            'error': err,
            'code': 500}
    jrest.rproxy.pool_request.return_value = resp
    detach_target_vol_expected += [
        mock.call('DELETE', addr)]
    self.assertRaises(jexc.JDSSException,
                      jrest.detach_target_vol, tname, vname)
    jrest.rproxy.pool_request.assert_has_calls(detach_target_vol_expected)
| 33.955912
| 79
| 0.533198
| 3,643
| 33,888
| 4.756794
| 0.09031
| 0.045704
| 0.060592
| 0.088868
| 0.828092
| 0.804547
| 0.769519
| 0.74661
| 0.722661
| 0.682094
| 0
| 0.028453
| 0.339353
| 33,888
| 997
| 80
| 33.98997
| 0.745578
| 0.029243
| 0
| 0.71467
| 0
| 0.008075
| 0.177919
| 0.030348
| 0
| 0
| 0
| 0
| 0.095559
| 1
| 0.033647
| false
| 0.004038
| 0.010767
| 0.001346
| 0.048452
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
15013a0960c350df165ff8d6d98b9aa71514ede2
| 30,635
|
py
|
Python
|
python_modules/libraries/dagster-gcp/dagster_gcp/dataproc/configs_dataproc_job.py
|
bambielli-flex/dagster
|
30b75ba7c62fc536bc827f177c1dc6ba20f5ae20
|
[
"Apache-2.0"
] | null | null | null |
python_modules/libraries/dagster-gcp/dagster_gcp/dataproc/configs_dataproc_job.py
|
bambielli-flex/dagster
|
30b75ba7c62fc536bc827f177c1dc6ba20f5ae20
|
[
"Apache-2.0"
] | null | null | null |
python_modules/libraries/dagster-gcp/dagster_gcp/dataproc/configs_dataproc_job.py
|
bambielli-flex/dagster
|
30b75ba7c62fc536bc827f177c1dc6ba20f5ae20
|
[
"Apache-2.0"
] | null | null | null |
'''NOTE: THIS FILE IS AUTO-GENERATED. DO NOT EDIT
@generated
Produced via:
python automation/parse_dataproc_configs.py \
'''
from dagster import Bool, Dict, Field, Int, List, PermissiveDict, String
# NOTE(review): this file is auto-generated ("DO NOT EDIT"); the
# de-duplication below of the five identical 'loggingConfig' fields and the
# three identical 'queryList' fields should ultimately be ported back into
# automation/parse_dataproc_configs.py so regeneration does not undo it.


def _logging_config_field():
    """Build the ``loggingConfig`` field shared by every driver-based job type."""
    return Field(
        Dict(
            fields={
                'driverLogLevels': Field(
                    PermissiveDict(),
                    description='''The per-package log levels for the
                    driver. This may include "root" package name to
                    configure rootLogger. Examples: \'com.google = FATAL\',
                    \'root = INFO\', \'org.apache = DEBUG\'''',
                    is_optional=True,
                )
            }
        ),
        description='''The runtime logging config of the job.''',
        is_optional=True,
    )


def _query_list_field():
    """Build the ``queryList`` field shared by the Pig, Hive and Spark SQL jobs."""
    return Field(
        Dict(
            fields={
                'queries': Field(
                    List(String),
                    description='''Required. The queries to execute. You do
                    not need to terminate a query with a semicolon. Multiple
                    queries can be specified in one string by separating
                    each with a semicolon. Here is an example of an Cloud
                    Dataproc API snippet that uses a QueryList to specify a
                    HiveJob: "hiveJob": { "queryList": { "queries": [
                    "query1", "query2", "query3;query4", ]
                    } } ''',
                    is_optional=True,
                )
            }
        ),
        description='''A list of queries to run on a cluster.''',
        is_optional=True,
    )


def define_dataproc_job_config():
    """Return the dagster config schema for a Cloud Dataproc job resource.

    Mirrors the Dataproc v1 ``Job`` REST resource: one optional sub-field per
    job type (pyspark, hadoop, pig, hive, sparkSql, spark) plus reference,
    placement, scheduling, status and labels.
    """
    return Field(
        Dict(
            fields={
                'pysparkJob': Field(
                    Dict(
                        fields={
                            'mainPythonFileUri': Field(
                                String,
                                description='''Required. The HCFS URI of the main Python file to use
                                as the driver. Must be a .py file.''',
                                is_optional=True,
                            ),
                            'archiveUris': Field(
                                List(String),
                                description='''Optional. HCFS URIs of archives to be extracted in
                                the working directory of .jar, .tar, .tar.gz, .tgz, and .zip.''',
                                is_optional=True,
                            ),
                            'jarFileUris': Field(
                                List(String),
                                description='''Optional. HCFS URIs of jar files to add to the
                                CLASSPATHs of the Python driver and tasks.''',
                                is_optional=True,
                            ),
                            'loggingConfig': _logging_config_field(),
                            'properties': Field(
                                PermissiveDict(),
                                description='''Optional. A mapping of property names to values, used
                                to configure PySpark. Properties that conflict with values set by
                                the Cloud Dataproc API may be overwritten. Can include properties
                                set in /etc/spark/conf/spark-defaults.conf and classes in user
                                code.''',
                                is_optional=True,
                            ),
                            'args': Field(
                                List(String),
                                description='''Optional. The arguments to pass to the driver. Do not
                                include arguments, such as --conf, that can be set as job
                                properties, since a collision may occur that causes an incorrect job
                                submission.''',
                                is_optional=True,
                            ),
                            'fileUris': Field(
                                List(String),
                                description='''Optional. HCFS URIs of files to be copied to the
                                working directory of Python drivers and distributed tasks. Useful
                                for naively parallel tasks.''',
                                is_optional=True,
                            ),
                            'pythonFileUris': Field(
                                List(String),
                                description='''Optional. HCFS file URIs of Python files to pass to
                                the PySpark framework. Supported file types: .py, .egg, and
                                .zip.''',
                                is_optional=True,
                            ),
                        }
                    ),
                    description='''A Cloud Dataproc job for running Apache PySpark
                    (https://spark.apache.org/docs/0.9.0/python-programming-guide.html) applications
                    on YARN.''',
                    is_optional=True,
                ),
                'reference': Field(
                    Dict(
                        fields={
                            'projectId': Field(
                                String,
                                description='''Required. The ID of the Google Cloud Platform project
                                that the job belongs to.''',
                                is_optional=True,
                            ),
                            'jobId': Field(
                                String,
                                description='''Optional. The job ID, which must be unique within the
                                project.The ID must contain only letters (a-z, A-Z), numbers (0-9),
                                underscores (_), or hyphens (-). The maximum length is 100
                                characters.If not specified by the caller, the job ID will be
                                provided by the server.''',
                                is_optional=True,
                            ),
                        }
                    ),
                    description='''Encapsulates the full scoping used to reference a job.''',
                    is_optional=True,
                ),
                'hadoopJob': Field(
                    Dict(
                        fields={
                            'jarFileUris': Field(
                                List(String),
                                description='''Optional. Jar file URIs to add to the CLASSPATHs of
                                the Hadoop driver and tasks.''',
                                is_optional=True,
                            ),
                            'loggingConfig': _logging_config_field(),
                            'properties': Field(
                                PermissiveDict(),
                                description='''Optional. A mapping of property names to values, used
                                to configure Hadoop. Properties that conflict with values set by the
                                Cloud Dataproc API may be overwritten. Can include properties set in
                                /etc/hadoop/conf/*-site and classes in user code.''',
                                is_optional=True,
                            ),
                            'args': Field(
                                List(String),
                                description='''Optional. The arguments to pass to the driver. Do not
                                include arguments, such as -libjars or -Dfoo=bar, that can be set as
                                job properties, since a collision may occur that causes an incorrect
                                job submission.''',
                                is_optional=True,
                            ),
                            'fileUris': Field(
                                List(String),
                                description='''Optional. HCFS (Hadoop Compatible Filesystem) URIs of
                                files to be copied to the working directory of Hadoop drivers and
                                distributed tasks. Useful for naively parallel tasks.''',
                                is_optional=True,
                            ),
                            'mainClass': Field(
                                String,
                                description='''The name of the driver\'s main class. The jar file
                                containing the class must be in the default CLASSPATH or specified
                                in jar_file_uris.''',
                                is_optional=True,
                            ),
                            'archiveUris': Field(
                                List(String),
                                description='''Optional. HCFS URIs of archives to be extracted in
                                the working directory of Hadoop drivers and tasks. Supported file
                                types: .jar, .tar, .tar.gz, .tgz, or .zip.''',
                                is_optional=True,
                            ),
                            'mainJarFileUri': Field(
                                String,
                                description='''The HCFS URI of the jar file containing the main
                                class. Examples:
                                \'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar\'
                                \'hdfs:/tmp/test-samples/custom-wordcount.jar\'
                                \'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar\'''',
                                is_optional=True,
                            ),
                        }
                    ),
                    description='''A Cloud Dataproc job for running Apache Hadoop MapReduce
                    (https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html)
                    jobs on Apache Hadoop YARN
                    (https://hadoop.apache.org/docs/r2.7.1/hadoop-yarn/hadoop-yarn-site/YARN.html).''',
                    is_optional=True,
                ),
                'status': Field(
                    Dict(fields={}), description='''Cloud Dataproc job status.''', is_optional=True
                ),
                'placement': Field(
                    Dict(
                        fields={
                            'clusterName': Field(
                                String,
                                description='''Required. The name of the cluster where the job will
                                be submitted.''',
                                is_optional=True,
                            )
                        }
                    ),
                    description='''Cloud Dataproc job config.''',
                    is_optional=True,
                ),
                'scheduling': Field(
                    Dict(
                        fields={
                            'maxFailuresPerHour': Field(
                                Int,
                                description='''Optional. Maximum number of times per hour a driver
                                may be restarted as a result of driver terminating with non-zero
                                code before job is reported failed.A job may be reported as
                                thrashing if driver exits with non-zero code 4 times within 10
                                minute window.Maximum value is 10.''',
                                is_optional=True,
                            )
                        }
                    ),
                    description='''Job scheduling options.''',
                    is_optional=True,
                ),
                'pigJob': Field(
                    Dict(
                        fields={
                            'queryFileUri': Field(
                                String,
                                description='''The HCFS URI of the script that contains the Pig
                                queries.''',
                                is_optional=True,
                            ),
                            'queryList': _query_list_field(),
                            'jarFileUris': Field(
                                List(String),
                                description='''Optional. HCFS URIs of jar files to add to the
                                CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can
                                contain Pig UDFs.''',
                                is_optional=True,
                            ),
                            'scriptVariables': Field(
                                PermissiveDict(),
                                description='''Optional. Mapping of query variable names to values
                                (equivalent to the Pig command: name=[value]).''',
                                is_optional=True,
                            ),
                            'loggingConfig': _logging_config_field(),
                            'properties': Field(
                                PermissiveDict(),
                                description='''Optional. A mapping of property names to values, used
                                to configure Pig. Properties that conflict with values set by the
                                Cloud Dataproc API may be overwritten. Can include properties set in
                                /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and
                                classes in user code.''',
                                is_optional=True,
                            ),
                            'continueOnFailure': Field(
                                Bool,
                                description='''Optional. Whether to continue executing queries if a
                                query fails. The default value is false. Setting to true can be
                                useful when executing independent parallel queries.''',
                                is_optional=True,
                            ),
                        }
                    ),
                    description='''A Cloud Dataproc job for running Apache Pig
                    (https://pig.apache.org/) queries on YARN.''',
                    is_optional=True,
                ),
                'hiveJob': Field(
                    Dict(
                        fields={
                            'queryFileUri': Field(
                                String,
                                description='''The HCFS URI of the script that contains Hive
                                queries.''',
                                is_optional=True,
                            ),
                            'queryList': _query_list_field(),
                            'jarFileUris': Field(
                                List(String),
                                description='''Optional. HCFS URIs of jar files to add to the
                                CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can
                                contain Hive SerDes and UDFs.''',
                                is_optional=True,
                            ),
                            'scriptVariables': Field(
                                PermissiveDict(),
                                description='''Optional. Mapping of query variable names to values
                                (equivalent to the Hive command: SET name="value";).''',
                                is_optional=True,
                            ),
                            'properties': Field(
                                PermissiveDict(),
                                description='''Optional. A mapping of property names and values,
                                used to configure Hive. Properties that conflict with values set by
                                the Cloud Dataproc API may be overwritten. Can include properties
                                set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml,
                                and classes in user code.''',
                                is_optional=True,
                            ),
                            'continueOnFailure': Field(
                                Bool,
                                description='''Optional. Whether to continue executing queries if a
                                query fails. The default value is false. Setting to true can be
                                useful when executing independent parallel queries.''',
                                is_optional=True,
                            ),
                        }
                    ),
                    description='''A Cloud Dataproc job for running Apache Hive
                    (https://hive.apache.org/) queries on YARN.''',
                    is_optional=True,
                ),
                'labels': Field(
                    PermissiveDict(),
                    description='''Optional. The labels to associate with this job. Label keys must
                    contain 1 to 63 characters, and must conform to RFC 1035
                    (https://www.ietf.org/rfc/rfc1035.txt). Label values may be empty, but, if
                    present, must contain 1 to 63 characters, and must conform to RFC 1035
                    (https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be associated
                    with a job.''',
                    is_optional=True,
                ),
                'sparkSqlJob': Field(
                    Dict(
                        fields={
                            'queryFileUri': Field(
                                String,
                                description='''The HCFS URI of the script that contains SQL
                                queries.''',
                                is_optional=True,
                            ),
                            'queryList': _query_list_field(),
                            'scriptVariables': Field(
                                PermissiveDict(),
                                description='''Optional. Mapping of query variable names to values
                                (equivalent to the Spark SQL command: SET name="value";).''',
                                is_optional=True,
                            ),
                            'jarFileUris': Field(
                                List(String),
                                description='''Optional. HCFS URIs of jar files to be added to the
                                Spark CLASSPATH.''',
                                is_optional=True,
                            ),
                            'loggingConfig': _logging_config_field(),
                            'properties': Field(
                                PermissiveDict(),
                                description='''Optional. A mapping of property names to values, used
                                to configure Spark SQL\'s SparkConf. Properties that conflict with
                                values set by the Cloud Dataproc API may be overwritten.''',
                                is_optional=True,
                            ),
                        }
                    ),
                    description='''A Cloud Dataproc job for running Apache Spark SQL
                    (http://spark.apache.org/sql/) queries.''',
                    is_optional=True,
                ),
                'sparkJob': Field(
                    Dict(
                        fields={
                            'mainJarFileUri': Field(
                                String,
                                description='''The HCFS URI of the jar file that contains the main
                                class.''',
                                is_optional=True,
                            ),
                            'jarFileUris': Field(
                                List(String),
                                description='''Optional. HCFS URIs of jar files to add to the
                                CLASSPATHs of the Spark driver and tasks.''',
                                is_optional=True,
                            ),
                            'loggingConfig': _logging_config_field(),
                            'properties': Field(
                                PermissiveDict(),
                                description='''Optional. A mapping of property names to values, used
                                to configure Spark. Properties that conflict with values set by the
                                Cloud Dataproc API may be overwritten. Can include properties set in
                                /etc/spark/conf/spark-defaults.conf and classes in user code.''',
                                is_optional=True,
                            ),
                            'args': Field(
                                List(String),
                                description='''Optional. The arguments to pass to the driver. Do not
                                include arguments, such as --conf, that can be set as job
                                properties, since a collision may occur that causes an incorrect job
                                submission.''',
                                is_optional=True,
                            ),
                            'fileUris': Field(
                                List(String),
                                description='''Optional. HCFS URIs of files to be copied to the
                                working directory of Spark drivers and distributed tasks. Useful for
                                naively parallel tasks.''',
                                is_optional=True,
                            ),
                            'mainClass': Field(
                                String,
                                description='''The name of the driver\'s main class. The jar file
                                that contains the class must be in the default CLASSPATH or
                                specified in jar_file_uris.''',
                                is_optional=True,
                            ),
                            'archiveUris': Field(
                                List(String),
                                description='''Optional. HCFS URIs of archives to be extracted in
                                the working directory of Spark drivers and tasks. Supported file
                                types: .jar, .tar, .tar.gz, .tgz, and .zip.''',
                                is_optional=True,
                            ),
                        }
                    ),
                    description='''A Cloud Dataproc job for running Apache Spark
                    (http://spark.apache.org/) applications on YARN.''',
                    is_optional=True,
                ),
            }
        ),
        description='''A Cloud Dataproc job resource.''',
        is_optional=True,
    )
| 57.584586
| 136
| 0.35812
| 2,121
| 30,635
| 5.136256
| 0.150872
| 0.061502
| 0.086102
| 0.045346
| 0.789701
| 0.766385
| 0.751698
| 0.73747
| 0.727006
| 0.727006
| 0
| 0.004019
| 0.577705
| 30,635
| 531
| 137
| 57.693032
| 0.838061
| 0.003819
| 0
| 0.705202
| 1
| 0.00578
| 0.477809
| 0.012849
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001927
| true
| 0.007707
| 0.001927
| 0.001927
| 0.00578
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
1293fb2ca8d8238dceaa6a487761e13e954ceb25
| 115
|
py
|
Python
|
lib/solver_interface/pyoptsolver/pyipopt.py
|
paperstiger/trajOptLib
|
5e86a33537d89c0d1e35df7a436f9266fe817c49
|
[
"MIT"
] | 6
|
2020-04-29T05:02:30.000Z
|
2021-04-19T15:42:35.000Z
|
lib/solver_interface/pyoptsolver/pyipopt.py
|
paperstiger/trajOptLib
|
5e86a33537d89c0d1e35df7a436f9266fe817c49
|
[
"MIT"
] | null | null | null |
lib/solver_interface/pyoptsolver/pyipopt.py
|
paperstiger/trajOptLib
|
5e86a33537d89c0d1e35df7a436f9266fe817c49
|
[
"MIT"
] | null | null | null |
# Thin alias module: re-export the Ipopt names from the compiled
# ``pyoptsolver`` extension, renaming ``OptProblem`` to ``IpoptProblem``
# for callers that import via this module.
from pyoptsolver import IpoptConfig, OptProblem as IpoptProblem, IpoptSolver
from pyoptsolver import solve_problem
| 38.333333
| 76
| 0.878261
| 13
| 115
| 7.692308
| 0.769231
| 0.3
| 0.42
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104348
| 115
| 2
| 77
| 57.5
| 0.970874
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
12d12082a33fa6b379c46dc1844aa0680e93c90d
| 21,921
|
py
|
Python
|
tests/components/homekit/test_aidmanager.py
|
guiguid/core
|
d43617c41d6507f2d2b77aadf4fa1ebaf0058b14
|
[
"Apache-2.0"
] | 1
|
2020-04-07T15:44:54.000Z
|
2020-04-07T15:44:54.000Z
|
tests/components/homekit/test_aidmanager.py
|
guiguid/core
|
d43617c41d6507f2d2b77aadf4fa1ebaf0058b14
|
[
"Apache-2.0"
] | null | null | null |
tests/components/homekit/test_aidmanager.py
|
guiguid/core
|
d43617c41d6507f2d2b77aadf4fa1ebaf0058b14
|
[
"Apache-2.0"
] | 1
|
2020-05-24T07:37:49.000Z
|
2020-05-24T07:37:49.000Z
|
"""Tests for the HomeKit AID manager."""
import os
from zlib import adler32
from asynctest import patch
import pytest
from homeassistant.components.homekit.aidmanager import (
AID_MANAGER_STORAGE_KEY,
AccessoryAidStorage,
get_system_unique_id,
)
from homeassistant.helpers import device_registry
from homeassistant.helpers.storage import STORAGE_DIR
from tests.common import MockConfigEntry, mock_device_registry, mock_registry
@pytest.fixture
def device_reg(hass):
    """Provide a fresh mock device registry bound to ``hass``."""
    registry = mock_device_registry(hass)
    return registry
@pytest.fixture
def entity_reg(hass):
    """Provide a fresh mock entity registry bound to ``hass``."""
    registry = mock_registry(hass)
    return registry
async def test_aid_generation(hass, device_reg, entity_reg):
    """Test generating aids."""
    config_entry = MockConfigEntry(domain="test", data={})
    config_entry.add_to_hass(hass)
    device_entry = device_reg.async_get_or_create(
        config_entry_id=config_entry.entry_id,
        connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
    )
    light_ent = entity_reg.async_get_or_create(
        "light", "device", "unique_id", device_id=device_entry.id
    )
    light_ent2 = entity_reg.async_get_or_create(
        "light", "device", "other_unique_id", device_id=device_entry.id
    )
    remote_ent = entity_reg.async_get_or_create(
        "remote", "device", "unique_id", device_id=device_entry.id
    )
    for ent in (light_ent, light_ent2, remote_ent):
        hass.states.async_set(ent.entity_id, "on")
    hass.states.async_set("remote.has_no_unique_id", "on")

    # Every entity id maps to a fixed, deterministic aid.
    expected = (
        (light_ent.entity_id, 1692141785),
        (light_ent2.entity_id, 2732133210),
        (remote_ent.entity_id, 1867188557),
        ("remote.has_no_unique_id", 1872038229),
    )

    with patch(
        "homeassistant.components.homekit.aidmanager.AccessoryAidStorage.async_schedule_save"
    ):
        aid_storage = AccessoryAidStorage(hass)
        await aid_storage.async_initialize()

        # Allocation must be stable across repeated lookups.
        for _ in range(2):
            for entity_id, aid in expected:
                assert aid_storage.get_or_allocate_aid_for_entity_id(entity_id) == aid

        for ent in (light_ent, light_ent2, remote_ent):
            aid_storage.delete_aid(get_system_unique_id(ent))
        aid_storage.delete_aid("non-existant-one")

        # After deletion the very same aids must be handed out again.
        for _ in range(2):
            for entity_id, aid in expected:
                assert aid_storage.get_or_allocate_aid_for_entity_id(entity_id) == aid
async def test_aid_adler32_collision(hass, device_reg, entity_reg):
    """Test generating aids."""
    config_entry = MockConfigEntry(domain="test", data={})
    config_entry.add_to_hass(hass)
    device_entry = device_reg.async_get_or_create(
        config_entry_id=config_entry.entry_id,
        connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
    )
    with patch(
        "homeassistant.components.homekit.aidmanager.AccessoryAidStorage.async_schedule_save"
    ):
        aid_storage = AccessoryAidStorage(hass)
        await aid_storage.async_initialize()
        allocated = set()
        for unique_id in range(202):
            entry = entity_reg.async_get_or_create(
                "light", "device", unique_id, device_id=device_entry.id
            )
            hass.states.async_set(entry.entity_id, "on")
            aid = aid_storage.get_or_allocate_aid_for_entity_id(entry.entity_id)
            # Each entity must get a distinct aid even when adler32 collides.
            assert aid not in allocated
            allocated.add(aid)
async def test_aid_generation_no_unique_ids_handles_collision(
hass, device_reg, entity_reg
):
"""Test colliding aids is stable."""
aid_storage = AccessoryAidStorage(hass)
await aid_storage.async_initialize()
seen_aids = set()
collisions = []
for light_id in range(0, 220):
entity_id = f"light.light{light_id}"
hass.states.async_set(entity_id, "on")
expected_aid = adler32(entity_id.encode("utf-8"))
aid = aid_storage.get_or_allocate_aid_for_entity_id(entity_id)
if aid != expected_aid:
collisions.append(entity_id)
assert aid not in seen_aids
seen_aids.add(aid)
assert collisions == [
"light.light201",
"light.light202",
"light.light203",
"light.light204",
"light.light205",
"light.light206",
"light.light207",
"light.light208",
"light.light209",
"light.light211",
"light.light212",
"light.light213",
"light.light214",
"light.light215",
"light.light216",
"light.light217",
"light.light218",
"light.light219",
]
assert aid_storage.allocations == {
"light.light0": 514851983,
"light.light1": 514917520,
"light.light10": 594609344,
"light.light100": 677446896,
"light.light101": 677512433,
"light.light102": 677577970,
"light.light103": 677643507,
"light.light104": 677709044,
"light.light105": 677774581,
"light.light106": 677840118,
"light.light107": 677905655,
"light.light108": 677971192,
"light.light109": 678036729,
"light.light11": 594674881,
"light.light110": 677577969,
"light.light111": 677643506,
"light.light112": 677709043,
"light.light113": 677774580,
"light.light114": 677840117,
"light.light115": 677905654,
"light.light116": 677971191,
"light.light117": 678036728,
"light.light118": 678102265,
"light.light119": 678167802,
"light.light12": 594740418,
"light.light120": 677709042,
"light.light121": 677774579,
"light.light122": 677840116,
"light.light123": 677905653,
"light.light124": 677971190,
"light.light125": 678036727,
"light.light126": 678102264,
"light.light127": 678167801,
"light.light128": 678233338,
"light.light129": 678298875,
"light.light13": 594805955,
"light.light130": 677840115,
"light.light131": 677905652,
"light.light132": 677971189,
"light.light133": 678036726,
"light.light134": 678102263,
"light.light135": 678167800,
"light.light136": 678233337,
"light.light137": 678298874,
"light.light138": 678364411,
"light.light139": 678429948,
"light.light14": 594871492,
"light.light140": 677971188,
"light.light141": 678036725,
"light.light142": 678102262,
"light.light143": 678167799,
"light.light144": 678233336,
"light.light145": 678298873,
"light.light146": 678364410,
"light.light147": 678429947,
"light.light148": 678495484,
"light.light149": 678561021,
"light.light15": 594937029,
"light.light150": 678102261,
"light.light151": 678167798,
"light.light152": 678233335,
"light.light153": 678298872,
"light.light154": 678364409,
"light.light155": 678429946,
"light.light156": 678495483,
"light.light157": 678561020,
"light.light158": 678626557,
"light.light159": 678692094,
"light.light16": 595002566,
"light.light160": 678233334,
"light.light161": 678298871,
"light.light162": 678364408,
"light.light163": 678429945,
"light.light164": 678495482,
"light.light165": 678561019,
"light.light166": 678626556,
"light.light167": 678692093,
"light.light168": 678757630,
"light.light169": 678823167,
"light.light17": 595068103,
"light.light170": 678364407,
"light.light171": 678429944,
"light.light172": 678495481,
"light.light173": 678561018,
"light.light174": 678626555,
"light.light175": 678692092,
"light.light176": 678757629,
"light.light177": 678823166,
"light.light178": 678888703,
"light.light179": 678954240,
"light.light18": 595133640,
"light.light180": 678495480,
"light.light181": 678561017,
"light.light182": 678626554,
"light.light183": 678692091,
"light.light184": 678757628,
"light.light185": 678823165,
"light.light186": 678888702,
"light.light187": 678954239,
"light.light188": 679019776,
"light.light189": 679085313,
"light.light19": 595199177,
"light.light190": 678626553,
"light.light191": 678692090,
"light.light192": 678757627,
"light.light193": 678823164,
"light.light194": 678888701,
"light.light195": 678954238,
"light.light196": 679019775,
"light.light197": 679085312,
"light.light198": 679150849,
"light.light199": 679216386,
"light.light2": 514983057,
"light.light20": 594740417,
"light.light200": 677643505,
"light.light201": 1682157970,
"light.light202": 1665380351,
"light.light203": 1648602732,
"light.light204": 1631825113,
"light.light205": 1615047494,
"light.light206": 1598269875,
"light.light207": 1581492256,
"light.light208": 1833156541,
"light.light209": 1816378922,
"light.light21": 594805954,
"light.light210": 677774578,
"light.light211": 1614900399,
"light.light212": 1631678018,
"light.light213": 1648455637,
"light.light214": 1531012304,
"light.light215": 1547789923,
"light.light216": 1564567542,
"light.light217": 1581345161,
"light.light218": 1732343732,
"light.light219": 1749121351,
"light.light22": 594871491,
"light.light23": 594937028,
"light.light24": 595002565,
"light.light25": 595068102,
"light.light26": 595133639,
"light.light27": 595199176,
"light.light28": 595264713,
"light.light29": 595330250,
"light.light3": 515048594,
"light.light30": 594871490,
"light.light31": 594937027,
"light.light32": 595002564,
"light.light33": 595068101,
"light.light34": 595133638,
"light.light35": 595199175,
"light.light36": 595264712,
"light.light37": 595330249,
"light.light38": 595395786,
"light.light39": 595461323,
"light.light4": 515114131,
"light.light40": 595002563,
"light.light41": 595068100,
"light.light42": 595133637,
"light.light43": 595199174,
"light.light44": 595264711,
"light.light45": 595330248,
"light.light46": 595395785,
"light.light47": 595461322,
"light.light48": 595526859,
"light.light49": 595592396,
"light.light5": 515179668,
"light.light50": 595133636,
"light.light51": 595199173,
"light.light52": 595264710,
"light.light53": 595330247,
"light.light54": 595395784,
"light.light55": 595461321,
"light.light56": 595526858,
"light.light57": 595592395,
"light.light58": 595657932,
"light.light59": 595723469,
"light.light6": 515245205,
"light.light60": 595264709,
"light.light61": 595330246,
"light.light62": 595395783,
"light.light63": 595461320,
"light.light64": 595526857,
"light.light65": 595592394,
"light.light66": 595657931,
"light.light67": 595723468,
"light.light68": 595789005,
"light.light69": 595854542,
"light.light7": 515310742,
"light.light70": 595395782,
"light.light71": 595461319,
"light.light72": 595526856,
"light.light73": 595592393,
"light.light74": 595657930,
"light.light75": 595723467,
"light.light76": 595789004,
"light.light77": 595854541,
"light.light78": 595920078,
"light.light79": 595985615,
"light.light8": 515376279,
"light.light80": 595526855,
"light.light81": 595592392,
"light.light82": 595657929,
"light.light83": 595723466,
"light.light84": 595789003,
"light.light85": 595854540,
"light.light86": 595920077,
"light.light87": 595985614,
"light.light88": 596051151,
"light.light89": 596116688,
"light.light9": 515441816,
"light.light90": 595657928,
"light.light91": 595723465,
"light.light92": 595789002,
"light.light93": 595854539,
"light.light94": 595920076,
"light.light95": 595985613,
"light.light96": 596051150,
"light.light97": 596116687,
"light.light98": 596182224,
"light.light99": 596247761,
}
await aid_storage.async_save()
await hass.async_block_till_done()
aid_storage = AccessoryAidStorage(hass)
await aid_storage.async_initialize()
assert aid_storage.allocations == {
"light.light0": 514851983,
"light.light1": 514917520,
"light.light10": 594609344,
"light.light100": 677446896,
"light.light101": 677512433,
"light.light102": 677577970,
"light.light103": 677643507,
"light.light104": 677709044,
"light.light105": 677774581,
"light.light106": 677840118,
"light.light107": 677905655,
"light.light108": 677971192,
"light.light109": 678036729,
"light.light11": 594674881,
"light.light110": 677577969,
"light.light111": 677643506,
"light.light112": 677709043,
"light.light113": 677774580,
"light.light114": 677840117,
"light.light115": 677905654,
"light.light116": 677971191,
"light.light117": 678036728,
"light.light118": 678102265,
"light.light119": 678167802,
"light.light12": 594740418,
"light.light120": 677709042,
"light.light121": 677774579,
"light.light122": 677840116,
"light.light123": 677905653,
"light.light124": 677971190,
"light.light125": 678036727,
"light.light126": 678102264,
"light.light127": 678167801,
"light.light128": 678233338,
"light.light129": 678298875,
"light.light13": 594805955,
"light.light130": 677840115,
"light.light131": 677905652,
"light.light132": 677971189,
"light.light133": 678036726,
"light.light134": 678102263,
"light.light135": 678167800,
"light.light136": 678233337,
"light.light137": 678298874,
"light.light138": 678364411,
"light.light139": 678429948,
"light.light14": 594871492,
"light.light140": 677971188,
"light.light141": 678036725,
"light.light142": 678102262,
"light.light143": 678167799,
"light.light144": 678233336,
"light.light145": 678298873,
"light.light146": 678364410,
"light.light147": 678429947,
"light.light148": 678495484,
"light.light149": 678561021,
"light.light15": 594937029,
"light.light150": 678102261,
"light.light151": 678167798,
"light.light152": 678233335,
"light.light153": 678298872,
"light.light154": 678364409,
"light.light155": 678429946,
"light.light156": 678495483,
"light.light157": 678561020,
"light.light158": 678626557,
"light.light159": 678692094,
"light.light16": 595002566,
"light.light160": 678233334,
"light.light161": 678298871,
"light.light162": 678364408,
"light.light163": 678429945,
"light.light164": 678495482,
"light.light165": 678561019,
"light.light166": 678626556,
"light.light167": 678692093,
"light.light168": 678757630,
"light.light169": 678823167,
"light.light17": 595068103,
"light.light170": 678364407,
"light.light171": 678429944,
"light.light172": 678495481,
"light.light173": 678561018,
"light.light174": 678626555,
"light.light175": 678692092,
"light.light176": 678757629,
"light.light177": 678823166,
"light.light178": 678888703,
"light.light179": 678954240,
"light.light18": 595133640,
"light.light180": 678495480,
"light.light181": 678561017,
"light.light182": 678626554,
"light.light183": 678692091,
"light.light184": 678757628,
"light.light185": 678823165,
"light.light186": 678888702,
"light.light187": 678954239,
"light.light188": 679019776,
"light.light189": 679085313,
"light.light19": 595199177,
"light.light190": 678626553,
"light.light191": 678692090,
"light.light192": 678757627,
"light.light193": 678823164,
"light.light194": 678888701,
"light.light195": 678954238,
"light.light196": 679019775,
"light.light197": 679085312,
"light.light198": 679150849,
"light.light199": 679216386,
"light.light2": 514983057,
"light.light20": 594740417,
"light.light200": 677643505,
"light.light201": 1682157970,
"light.light202": 1665380351,
"light.light203": 1648602732,
"light.light204": 1631825113,
"light.light205": 1615047494,
"light.light206": 1598269875,
"light.light207": 1581492256,
"light.light208": 1833156541,
"light.light209": 1816378922,
"light.light21": 594805954,
"light.light210": 677774578,
"light.light211": 1614900399,
"light.light212": 1631678018,
"light.light213": 1648455637,
"light.light214": 1531012304,
"light.light215": 1547789923,
"light.light216": 1564567542,
"light.light217": 1581345161,
"light.light218": 1732343732,
"light.light219": 1749121351,
"light.light22": 594871491,
"light.light23": 594937028,
"light.light24": 595002565,
"light.light25": 595068102,
"light.light26": 595133639,
"light.light27": 595199176,
"light.light28": 595264713,
"light.light29": 595330250,
"light.light3": 515048594,
"light.light30": 594871490,
"light.light31": 594937027,
"light.light32": 595002564,
"light.light33": 595068101,
"light.light34": 595133638,
"light.light35": 595199175,
"light.light36": 595264712,
"light.light37": 595330249,
"light.light38": 595395786,
"light.light39": 595461323,
"light.light4": 515114131,
"light.light40": 595002563,
"light.light41": 595068100,
"light.light42": 595133637,
"light.light43": 595199174,
"light.light44": 595264711,
"light.light45": 595330248,
"light.light46": 595395785,
"light.light47": 595461322,
"light.light48": 595526859,
"light.light49": 595592396,
"light.light5": 515179668,
"light.light50": 595133636,
"light.light51": 595199173,
"light.light52": 595264710,
"light.light53": 595330247,
"light.light54": 595395784,
"light.light55": 595461321,
"light.light56": 595526858,
"light.light57": 595592395,
"light.light58": 595657932,
"light.light59": 595723469,
"light.light6": 515245205,
"light.light60": 595264709,
"light.light61": 595330246,
"light.light62": 595395783,
"light.light63": 595461320,
"light.light64": 595526857,
"light.light65": 595592394,
"light.light66": 595657931,
"light.light67": 595723468,
"light.light68": 595789005,
"light.light69": 595854542,
"light.light7": 515310742,
"light.light70": 595395782,
"light.light71": 595461319,
"light.light72": 595526856,
"light.light73": 595592393,
"light.light74": 595657930,
"light.light75": 595723467,
"light.light76": 595789004,
"light.light77": 595854541,
"light.light78": 595920078,
"light.light79": 595985615,
"light.light8": 515376279,
"light.light80": 595526855,
"light.light81": 595592392,
"light.light82": 595657929,
"light.light83": 595723466,
"light.light84": 595789003,
"light.light85": 595854540,
"light.light86": 595920077,
"light.light87": 595985614,
"light.light88": 596051151,
"light.light89": 596116688,
"light.light9": 515441816,
"light.light90": 595657928,
"light.light91": 595723465,
"light.light92": 595789002,
"light.light93": 595854539,
"light.light94": 595920076,
"light.light95": 595985613,
"light.light96": 596051150,
"light.light97": 596116687,
"light.light98": 596182224,
"light.light99": 596247761,
}
aid_storage_path = hass.config.path(STORAGE_DIR, AID_MANAGER_STORAGE_KEY)
if await hass.async_add_executor_job(os.path.exists, aid_storage_path):
await hass.async_add_executor_job(os.unlink, aid_storage_path)
| 35.017572
| 93
| 0.613567
| 2,059
| 21,921
| 6.396309
| 0.271491
| 0.02126
| 0.012149
| 0.01139
| 0.913743
| 0.904784
| 0.904784
| 0.892559
| 0.879347
| 0.861655
| 0
| 0.324641
| 0.260025
| 21,921
| 625
| 94
| 35.0736
| 0.48727
| 0.004744
| 0
| 0.850594
| 0
| 0
| 0.304039
| 0.011791
| 0
| 0
| 0
| 0
| 0.022071
| 1
| 0.003396
| false
| 0
| 0.013582
| 0
| 0.020374
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
42246b395c5e0029be1661309e4bd1bb67776497
| 245
|
py
|
Python
|
Mundo 1/Ex05.py
|
legna7/Python
|
52e0b642d1b7acc592ec82dd360c5697fb0765db
|
[
"MIT"
] | null | null | null |
Mundo 1/Ex05.py
|
legna7/Python
|
52e0b642d1b7acc592ec82dd360c5697fb0765db
|
[
"MIT"
] | null | null | null |
Mundo 1/Ex05.py
|
legna7/Python
|
52e0b642d1b7acc592ec82dd360c5697fb0765db
|
[
"MIT"
] | null | null | null |
# Read an integer and report its predecessor and successor.
n = int(input('digite um nr: '))
predecessor = n - 1
print(predecessor)
successor = n + 1
print(successor)
# Same message printed twice: once from the named variables, once recomputed inline.
print('Analisando o vlr {}, seu antecessor {} e o seu sucessor {}.'.format(n, predecessor, successor))
print('Analisando o vlr {}, seu antecessor {} e o seu sucessor {}.'.format(n, (n - 1), (n + 1)))
| 35
| 91
| 0.6
| 46
| 245
| 3.195652
| 0.391304
| 0.054422
| 0.095238
| 0.231293
| 0.721088
| 0.721088
| 0.721088
| 0.721088
| 0.721088
| 0.721088
| 0
| 0.019704
| 0.171429
| 245
| 7
| 91
| 35
| 0.704434
| 0
| 0
| 0
| 0
| 0
| 0.536585
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.571429
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
4224be2c946173761c571186f7ba7adca4ecf446
| 191
|
py
|
Python
|
ocha/clis/__init__.py
|
Blesproject/GENERATOR
|
a56c6ee6086dcd268bd021355131f0c23508b12d
|
[
"MIT"
] | 1
|
2019-01-27T16:32:24.000Z
|
2019-01-27T16:32:24.000Z
|
ocha/clis/__init__.py
|
Blesproject/GENERATOR
|
a56c6ee6086dcd268bd021355131f0c23508b12d
|
[
"MIT"
] | 5
|
2020-03-24T16:37:57.000Z
|
2021-04-30T20:39:28.000Z
|
ocha/clis/__init__.py
|
hammer-code/ocha-cli
|
bd066318ddebfaaa7c30d8bff997e2b111400001
|
[
"MIT"
] | null | null | null |
from .create import *
from .build import *
from .run import *
from .deploy import *
from .generate import *
from .login import *
from .logout import *
from .moduls import *
from .neo import *
| 21.222222
| 23
| 0.722513
| 27
| 191
| 5.111111
| 0.407407
| 0.57971
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.183246
| 191
| 9
| 24
| 21.222222
| 0.884615
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4229c424474a2f4709eb32bc9ac52f8c073b5a55
| 124
|
py
|
Python
|
OrderMatchingEngine/__init__.py
|
nicoloridulfo/Order-Matching-Engine
|
32c9b4d03099d5baab0d71ad214206c7595086ba
|
[
"MIT"
] | 33
|
2020-03-17T19:23:21.000Z
|
2022-03-29T06:24:47.000Z
|
OrderMatchingEngine/__init__.py
|
jiangtiantu/Order-Matching-Engine
|
011fd99bfd6802580f49c1e7067394c74d0e9516
|
[
"MIT"
] | 5
|
2020-03-24T06:45:18.000Z
|
2022-03-29T16:52:35.000Z
|
OrderMatchingEngine/__init__.py
|
jiangtiantu/Order-Matching-Engine
|
011fd99bfd6802580f49c1e7067394c74d0e9516
|
[
"MIT"
] | 12
|
2020-03-18T15:43:49.000Z
|
2022-01-20T21:05:13.000Z
|
from OrderMatchingEngine.Order import *
from OrderMatchingEngine.Orderbook import *
from OrderMatchingEngine.Trade import *
| 31
| 43
| 0.854839
| 12
| 124
| 8.833333
| 0.5
| 0.650943
| 0.54717
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096774
| 124
| 3
| 44
| 41.333333
| 0.946429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
425cc2012d6e6688619d32db90c141f8471dfba5
| 4,683
|
py
|
Python
|
comic_site/blog/views.py
|
ExCorde314/comic_site
|
31e4bb0f3dd1f25eb497d8374de301a07f74c805
|
[
"MIT"
] | 1
|
2018-01-25T21:36:09.000Z
|
2018-01-25T21:36:09.000Z
|
comic_site/blog/views.py
|
ExCorde314/comic_site
|
31e4bb0f3dd1f25eb497d8374de301a07f74c805
|
[
"MIT"
] | null | null | null |
comic_site/blog/views.py
|
ExCorde314/comic_site
|
31e4bb0f3dd1f25eb497d8374de301a07f74c805
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render, get_object_or_404, redirect
from .models import Post
from .forms import AddPost, ChangePost, DeletePost
from django.http import Http404
# The landing page of the blog
def index(request):
    """Render the most recent published post as the landing page."""
    # Only posts with a publication date count as published.
    published = Post.objects.filter(date_published__isnull=False)
    post = published.latest('date_published')
    earliest = published.earliest('date_published')

    def _neighbor_id(getter):
        # At either end of the timeline, fall back to the current post's id.
        try:
            return getter().id
        except Post.DoesNotExist:
            return post.id

    context = {
        'post': post,
        'first': earliest.id,
        'next': _neighbor_id(post.get_next_by_date_published),
        'previous': _neighbor_id(post.get_previous_by_date_published),
        'user_logged_in': request.user.is_authenticated,
    }
    return render(request, 'blog/single.html', context)
# Single blog post page
def single(request, post_id):
    """Render one blog post identified by *post_id* (404 if absent)."""
    post = get_object_or_404(Post, pk=post_id)
    # Oldest published post supplies the "first" navigation link.
    earliest = Post.objects.filter(date_published__isnull=False).earliest('date_published')

    def _neighbor_id(getter):
        # At either end of the timeline, fall back to the current post's id.
        try:
            return getter().id
        except Post.DoesNotExist:
            return post.id

    context = {
        'post': post,
        'first': earliest.id,
        'next': _neighbor_id(post.get_next_by_date_published),
        'previous': _neighbor_id(post.get_previous_by_date_published),
        'user_logged_in': request.user.is_authenticated,
    }
    return render(request, 'blog/single.html', context)
# Add blog post page
def add(request):
    """Create a new post; requires the 'blog.add_post' permission (else 404)."""
    if not (request.user.is_authenticated and request.user.has_perm('blog.add_post')):
        raise Http404
    if request.method == "POST":
        form = AddPost(request.POST)
        if form.is_valid():
            form.save()
            return redirect('blog:index')
        # Invalid submission: fall through and re-render with errors.
    else:
        form = AddPost()
    context = {
        'user_logged_in': True,
        'form': form,
    }
    return render(request, 'blog/add.html', context)
# Change blog post page
def change(request, post_id):
    """Edit an existing post; requires 'blog.change_post' (else 404)."""
    if not (request.user.is_authenticated and request.user.has_perm('blog.change_post')):
        raise Http404
    post = get_object_or_404(Post, pk=post_id)
    if request.method == "POST":
        form = ChangePost(request.POST, instance=post)
        if form.is_valid():
            form.save()
            return redirect('blog:single', post_id=post_id)
        # Invalid submission: fall through and re-render with errors.
    else:
        form = ChangePost(instance=post)
    context = {
        'user_logged_in': True,
        'form': form,
    }
    return render(request, 'blog/change.html', context)
# Delete blog post page
def delete(request, post_id):
    """Delete a post after confirmation; requires 'blog.delete_post' (else 404)."""
    if not (request.user.is_authenticated and request.user.has_perm('blog.delete_post')):
        raise Http404
    post = get_object_or_404(Post, pk=post_id)
    if request.method == "POST":
        form = DeletePost(request.POST)
        if form.is_valid():
            post.delete()
            return redirect('blog:index')
        # Invalid submission: fall through and re-render with errors.
    else:
        form = DeletePost()
    context = {
        'user_logged_in': True,
        'form': form,
        'post': post,
    }
    return render(request, 'blog/delete.html', context)
| 28.907407
| 92
| 0.604313
| 569
| 4,683
| 4.801406
| 0.13181
| 0.040996
| 0.052709
| 0.061493
| 0.808931
| 0.787335
| 0.787335
| 0.753294
| 0.753294
| 0.742313
| 0
| 0.007346
| 0.30237
| 4,683
| 162
| 93
| 28.907407
| 0.828895
| 0.138586
| 0
| 0.72381
| 0
| 0
| 0.113707
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047619
| false
| 0
| 0.038095
| 0
| 0.190476
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
428444bdb29d46dbda28698174b789fda06a0f9c
| 16,150
|
py
|
Python
|
video_level_models.py
|
amitkumarj441/youtube-8m
|
18906adf378ffdf18ce1441c454489373f918420
|
[
"Apache-2.0"
] | null | null | null |
video_level_models.py
|
amitkumarj441/youtube-8m
|
18906adf378ffdf18ce1441c454489373f918420
|
[
"Apache-2.0"
] | null | null | null |
video_level_models.py
|
amitkumarj441/youtube-8m
|
18906adf378ffdf18ce1441c454489373f918420
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains model definitions."""
import math
import models
import tensorflow as tf
import utils
from tensorflow import flags
import tensorflow.contrib.slim as slim
# Command-line flags shared by the model classes in this module.
FLAGS = flags.FLAGS
flags.DEFINE_integer(
    "moe_num_mixtures", 2,
    "The number of mixtures (excluding the dummy 'expert') used for MoeModel.")
class LogisticModel(models.BaseModel):
  """Logistic model with L2 regularization."""

  def create_model(self, model_input, vocab_size, l2_penalty=1e-8, **unused_params):
    """Creates a logistic model.

    Args:
      model_input: 'batch' x 'num_features' matrix of input features.
      vocab_size: The number of classes in the dataset.
      l2_penalty: Weight of the L2 regularizer applied to the layer weights.

    Returns:
      A dictionary whose 'predictions' key holds the per-class probabilities,
      shaped batch_size x num_classes.
    """
    # One sigmoid unit per class, directly on the input features.
    probabilities = slim.fully_connected(
        model_input,
        vocab_size,
        activation_fn=tf.nn.sigmoid,
        weights_regularizer=slim.l2_regularizer(l2_penalty))
    return {"predictions": probabilities}
class MoeModel(models.BaseModel):
  """A softmax over a mixture of logistic models (with L2 regularization)."""

  def create_model(self,
                   model_input,
                   vocab_size,
                   num_mixtures=None,
                   l2_penalty=1e-8,
                   **unused_params):
    """Creates a Mixture of (Logistic) Experts model.

    The model consists of a per-class softmax distribution over a
    configurable number of logistic classifiers. One of the classifiers in the
    mixture is not trained, and always predicts 0.

    Args:
      model_input: 'batch_size' x 'num_features' matrix of input features.
      vocab_size: The number of classes in the dataset.
      num_mixtures: The number of mixtures (excluding a dummy 'expert' that
        always predicts the non-existence of an entity).
      l2_penalty: How much to penalize the squared magnitudes of parameter
        values.
    Returns:
      A dictionary with a tensor containing the probability predictions of the
      model in the 'predictions' key. The dimensions of the tensor are
      batch_size x num_classes.
    """
    num_mixtures = num_mixtures or FLAGS.moe_num_mixtures

    # Gating network: one logit per (class, expert) pair plus one extra logit
    # per class for the untrained dummy expert (which implicitly predicts 0).
    gate_activations = slim.fully_connected(
        model_input,
        vocab_size * (num_mixtures + 1),
        activation_fn=None,
        biases_initializer=None,
        weights_regularizer=slim.l2_regularizer(l2_penalty),
        scope="gates")
    # Expert network: num_mixtures independent logistic classifiers per class.
    expert_activations = slim.fully_connected(
        model_input,
        vocab_size * num_mixtures,
        activation_fn=None,
        weights_regularizer=slim.l2_regularizer(l2_penalty),
        scope="experts")
    gating_distribution = tf.nn.softmax(tf.reshape(
        gate_activations,
        [-1, num_mixtures + 1]))  # (Batch * #Labels) x (num_mixtures + 1)
    expert_distribution = tf.nn.sigmoid(tf.reshape(
        expert_activations,
        [-1, num_mixtures]))  # (Batch * #Labels) x num_mixtures
    # The dummy expert's gate weight is dropped (its prediction is 0), so only
    # the first num_mixtures gate columns contribute to the mixture.
    final_probabilities_by_class_and_batch = tf.reduce_sum(
        gating_distribution[:, :num_mixtures] * expert_distribution, 1)
    final_probabilities = tf.reshape(final_probabilities_by_class_and_batch,
                                     [-1, vocab_size])
    return {"predictions": final_probabilities}
class MonoModel(models.BaseModel):
  """A neural network with a single hidden layer."""

  def create_model(self, model_input, vocab_size, l2_penalty=1e-8, **unused_params):
    """Creates a one-hidden-layer NN classifier.

    Args:
      model_input: 'batch' x 'num_features' matrix of input features.
      vocab_size: The number of classes in the dataset.
      l2_penalty: Unused; kept for signature compatibility with other models.

    Returns:
      A dictionary whose 'predictions' key holds the softmax class
      probabilities, shaped batch_size x num_classes.
    """
    hidden_units = 1024
    hidden_layer = slim.fully_connected(
        model_input, hidden_units, activation_fn=tf.nn.relu)
    predictions = slim.fully_connected(
        hidden_layer, vocab_size, activation_fn=tf.nn.softmax)
    return {"predictions": predictions}
class MoNN2LModel(models.BaseModel):
  """A softmax over a mixture of logistic models (with L2 regularization)."""

  def create_model(self,
                   model_input,
                   vocab_size,
                   num_mixtures=None,
                   l2_penalty=1e-6,
                   **unused_params):
    """Creates a Mixture of (Logistic) Experts model.

    The model consists of a per-class softmax distribution over a
    configurable number of logistic classifiers. One of the classifiers in the
    mixture is not trained, and always predicts 0.

    Variant: the experts are fed through a two-layer (4096-unit) ReLU MLP,
    while the gates still see the raw input features.

    Args:
      model_input: 'batch_size' x 'num_features' matrix of input features.
      vocab_size: The number of classes in the dataset.
      num_mixtures: The number of mixtures (excluding a dummy 'expert' that
        always predicts the non-existence of an entity).
      l2_penalty: How much to penalize the squared magnitudes of parameter
        values.
    Returns:
      A dictionary with a tensor containing the probability predictions of the
      model in the 'predictions' key. The dimensions of the tensor are
      batch_size x num_classes.
    """
    num_mixtures = num_mixtures or FLAGS.moe_num_mixtures

    # Gates are computed directly from the raw input features.
    gate_activations = slim.fully_connected(
        model_input,
        vocab_size * (num_mixtures + 1),
        activation_fn=None,
        biases_initializer=None,
        weights_regularizer=slim.l2_regularizer(l2_penalty),
        scope="gates")
    # Two hidden ReLU layers between the input and the experts.
    h1Units = 4096
    A1 = slim.fully_connected(
        model_input, h1Units, activation_fn=tf.nn.relu,
        weights_regularizer=slim.l2_regularizer(l2_penalty),
        scope='FC_H1')
    h2Units = 4096
    A2 = slim.fully_connected(
        A1, h2Units, activation_fn=tf.nn.relu,
        weights_regularizer=slim.l2_regularizer(l2_penalty),
        scope='FC_H2')
    # Experts consume the MLP output rather than the raw features.
    expert_activations = slim.fully_connected(
        A2,
        vocab_size * num_mixtures,
        activation_fn=None,
        weights_regularizer=slim.l2_regularizer(l2_penalty),
        scope="experts")
    gating_distribution = tf.nn.softmax(tf.reshape(
        gate_activations,
        [-1, num_mixtures + 1]))  # (Batch * #Labels) x (num_mixtures + 1)
    expert_distribution = tf.nn.sigmoid(tf.reshape(
        expert_activations,
        [-1, num_mixtures]))  # (Batch * #Labels) x num_mixtures
    # Drop the dummy expert's gate (its prediction is 0) and mix the rest.
    final_probabilities_by_class_and_batch = tf.reduce_sum(
        gating_distribution[:, :num_mixtures] * expert_distribution, 1)
    final_probabilities = tf.reshape(final_probabilities_by_class_and_batch,
                                     [-1, vocab_size])
    return {"predictions": final_probabilities}
class MoNN2LL2Pen8Model(models.BaseModel):
  """A softmax over a mixture of logistic models (with L2 regularization)."""

  def create_model(self,
                   model_input,
                   vocab_size,
                   num_mixtures=None,
                   l2_penalty=1e-8,
                   **unused_params):
    """Creates a Mixture of (Logistic) Experts model.

    The model consists of a per-class softmax distribution over a
    configurable number of logistic classifiers. One of the classifiers in the
    mixture is not trained, and always predicts 0.

    Identical to MoNN2LModel except for the default l2_penalty (1e-8 here
    versus 1e-6 there).

    Args:
      model_input: 'batch_size' x 'num_features' matrix of input features.
      vocab_size: The number of classes in the dataset.
      num_mixtures: The number of mixtures (excluding a dummy 'expert' that
        always predicts the non-existence of an entity).
      l2_penalty: How much to penalize the squared magnitudes of parameter
        values.
    Returns:
      A dictionary with a tensor containing the probability predictions of the
      model in the 'predictions' key. The dimensions of the tensor are
      batch_size x num_classes.
    """
    num_mixtures = num_mixtures or FLAGS.moe_num_mixtures

    # Gates are computed directly from the raw input features.
    gate_activations = slim.fully_connected(
        model_input,
        vocab_size * (num_mixtures + 1),
        activation_fn=None,
        biases_initializer=None,
        weights_regularizer=slim.l2_regularizer(l2_penalty),
        scope="gates")
    # Two hidden ReLU layers between the input and the experts.
    h1Units = 4096
    A1 = slim.fully_connected(
        model_input, h1Units, activation_fn=tf.nn.relu,
        weights_regularizer=slim.l2_regularizer(l2_penalty),
        scope='FC_H1')
    h2Units = 4096
    A2 = slim.fully_connected(
        A1, h2Units, activation_fn=tf.nn.relu,
        weights_regularizer=slim.l2_regularizer(l2_penalty),
        scope='FC_H2')
    # Experts consume the MLP output rather than the raw features.
    expert_activations = slim.fully_connected(
        A2,
        vocab_size * num_mixtures,
        activation_fn=None,
        weights_regularizer=slim.l2_regularizer(l2_penalty),
        scope="experts")
    gating_distribution = tf.nn.softmax(tf.reshape(
        gate_activations,
        [-1, num_mixtures + 1]))  # (Batch * #Labels) x (num_mixtures + 1)
    expert_distribution = tf.nn.sigmoid(tf.reshape(
        expert_activations,
        [-1, num_mixtures]))  # (Batch * #Labels) x num_mixtures
    # Drop the dummy expert's gate (its prediction is 0) and mix the rest.
    final_probabilities_by_class_and_batch = tf.reduce_sum(
        gating_distribution[:, :num_mixtures] * expert_distribution, 1)
    final_probabilities = tf.reshape(final_probabilities_by_class_and_batch,
                                     [-1, vocab_size])
    return {"predictions": final_probabilities}
class MoNN3LModel(models.BaseModel):
  """A softmax over a mixture of logistic models (with L2 regularization)."""

  def create_model(self,
                   model_input,
                   vocab_size,
                   num_mixtures=None,
                   l2_penalty=1e-6,
                   **unused_params):
    """Creates a Mixture of (Logistic) Experts model.

    The model consists of a per-class softmax distribution over a
    configurable number of logistic classifiers. One of the classifiers in the
    mixture is not trained, and always predicts 0.

    Variant: the experts are fed through a three-layer (4096-unit) ReLU MLP,
    while the gates still see the raw input features.

    Args:
      model_input: 'batch_size' x 'num_features' matrix of input features.
      vocab_size: The number of classes in the dataset.
      num_mixtures: The number of mixtures (excluding a dummy 'expert' that
        always predicts the non-existence of an entity).
      l2_penalty: How much to penalize the squared magnitudes of parameter
        values.
    Returns:
      A dictionary with a tensor containing the probability predictions of the
      model in the 'predictions' key. The dimensions of the tensor are
      batch_size x num_classes.
    """
    num_mixtures = num_mixtures or FLAGS.moe_num_mixtures

    # Gates are computed directly from the raw input features.
    gate_activations = slim.fully_connected(
        model_input,
        vocab_size * (num_mixtures + 1),
        activation_fn=None,
        biases_initializer=None,
        weights_regularizer=slim.l2_regularizer(l2_penalty),
        scope="gates")
    # Three hidden ReLU layers between the input and the experts.
    # (Fix: the original reused the name a2Units for the third layer's size;
    # each layer now has its own clearly-named width. Values are unchanged.)
    a1Units = 4096
    A1 = slim.fully_connected(
        model_input, a1Units, activation_fn=tf.nn.relu,
        weights_regularizer=slim.l2_regularizer(l2_penalty),
        scope='FC_HA1')
    a2Units = 4096
    A2 = slim.fully_connected(
        A1, a2Units, activation_fn=tf.nn.relu,
        weights_regularizer=slim.l2_regularizer(l2_penalty),
        scope='FC_HA2')
    a3Units = 4096
    A3 = slim.fully_connected(
        A2, a3Units, activation_fn=tf.nn.relu,
        weights_regularizer=slim.l2_regularizer(l2_penalty),
        scope='FC_HA3')
    # Experts consume the MLP output rather than the raw features.
    expert_activations = slim.fully_connected(
        A3,
        vocab_size * num_mixtures,
        activation_fn=None,
        weights_regularizer=slim.l2_regularizer(l2_penalty),
        scope="experts")
    gating_distribution = tf.nn.softmax(tf.reshape(
        gate_activations,
        [-1, num_mixtures + 1]))  # (Batch * #Labels) x (num_mixtures + 1)
    expert_distribution = tf.nn.sigmoid(tf.reshape(
        expert_activations,
        [-1, num_mixtures]))  # (Batch * #Labels) x num_mixtures
    # Drop the dummy expert's gate (its prediction is 0) and mix the rest.
    final_probabilities_by_class_and_batch = tf.reduce_sum(
        gating_distribution[:, :num_mixtures] * expert_distribution, 1)
    final_probabilities = tf.reshape(final_probabilities_by_class_and_batch,
                                     [-1, vocab_size])
    return {"predictions": final_probabilities}
class CgMoeModel(models.BaseModel):
  """
  CG(Context Gating) is added before the MoE(Mixture of Experts)
  """

  def create_model(self,
                   model_input,
                   vocab_size,
                   num_mixtures=None,
                   l2_penalty=1e-8,
                   **unused_params):
    # Default the mixture count from the command-line flag.
    num_mixtures = num_mixtures or FLAGS.moe_num_mixtures
    # Hard-coded input width for the gating matrix; presumably 1024 video +
    # 128 audio features — TODO confirm against the input pipeline.
    numx = 128+1024
    # Context gating: element-wise sigmoid gate learned from the input itself,
    # multiplied back onto the input (cg = sigmoid(x W + b) * x).
    w = tf.Variable(tf.truncated_normal([numx,numx], stddev=0.1), name="w")
    b = tf.Variable(tf.zeros([numx]), name="b")
    cg = tf.multiply( tf.nn.sigmoid(tf.matmul(model_input, w) + b),
                      model_input)
    # Standard MoE over the gated features: gates get one extra logit per
    # class for the untrained dummy expert (implicit 0 predictor).
    gate_activations = slim.fully_connected(
        cg,
        vocab_size * (num_mixtures + 1),
        activation_fn=None,
        biases_initializer=None,
        weights_regularizer=slim.l2_regularizer(l2_penalty),
        scope="gates")
    expert_activations = slim.fully_connected(
        cg,
        vocab_size * num_mixtures,
        activation_fn=None,
        weights_regularizer=slim.l2_regularizer(l2_penalty),
        scope="experts")
    gating_distribution = tf.nn.softmax(tf.reshape(
        gate_activations,
        [-1, num_mixtures + 1]))  # (Batch * #Labels) x (num_mixtures + 1)
    expert_distribution = tf.nn.sigmoid(tf.reshape(
        expert_activations,
        [-1, num_mixtures]))  # (Batch * #Labels) x num_mixtures
    # Drop the dummy expert's gate (its prediction is 0) and mix the rest.
    final_probabilities_by_class_and_batch = tf.reduce_sum(
        gating_distribution[:, :num_mixtures] * expert_distribution, 1)
    final_probabilities = tf.reshape(final_probabilities_by_class_and_batch,
                                     [-1, vocab_size])
    return {"predictions": final_probabilities}
class Cg2MoeModel(models.BaseModel):
  """
  CG(Context Gating) is added before and after the MoE(Mixture of Experts)
  """

  def create_model(self,
                   model_input,
                   vocab_size,
                   num_mixtures=None,
                   l2_penalty=1e-8,
                   **unused_params):
    # Default the mixture count from the command-line flag.
    num_mixtures = num_mixtures or FLAGS.moe_num_mixtures
    # Hard-coded input width for the gating matrix; presumably 1024 video +
    # 128 audio features — TODO confirm against the input pipeline.
    numx = 128+1024
    # Input-side context gating: cg = sigmoid(x W + b) * x.
    w = tf.Variable(tf.truncated_normal([numx,numx], stddev=0.1), name="w")
    b = tf.Variable(tf.zeros([numx]), name="b")
    cg = tf.multiply( tf.nn.sigmoid(tf.matmul(model_input, w) + b),
                      model_input)
    # Standard MoE over the gated features: gates get one extra logit per
    # class for the untrained dummy expert (implicit 0 predictor).
    gate_activations = slim.fully_connected(
        cg,
        vocab_size * (num_mixtures + 1),
        activation_fn=None,
        biases_initializer=None,
        weights_regularizer=slim.l2_regularizer(l2_penalty),
        scope="gates")
    expert_activations = slim.fully_connected(
        cg,
        vocab_size * num_mixtures,
        activation_fn=None,
        weights_regularizer=slim.l2_regularizer(l2_penalty),
        scope="experts")
    gating_distribution = tf.nn.softmax(tf.reshape(
        gate_activations,
        [-1, num_mixtures + 1]))  # (Batch * #Labels) x (num_mixtures + 1)
    expert_distribution = tf.nn.sigmoid(tf.reshape(
        expert_activations,
        [-1, num_mixtures]))  # (Batch * #Labels) x num_mixtures
    # Drop the dummy expert's gate (its prediction is 0) and mix the rest.
    final_probabilities_by_class_and_batch = tf.reduce_sum(
        gating_distribution[:, :num_mixtures] * expert_distribution, 1)
    final_probabilities = tf.reshape(final_probabilities_by_class_and_batch,
                                     [-1, vocab_size])
    # Output-side context gating applied to the mixture probabilities:
    # cg2 = sigmoid(p W2 + b2) * p.
    w2 = tf.Variable(tf.truncated_normal([vocab_size,vocab_size], stddev=0.1), name="w2")
    b2 = tf.Variable(tf.zeros([vocab_size]), name="b2")
    cg2 = tf.multiply( tf.nn.sigmoid(tf.matmul(final_probabilities, w2) + b2),
                       final_probabilities)
    return {"predictions": cg2}
| 37.910798
| 89
| 0.660619
| 1,987
| 16,150
| 5.148968
| 0.110216
| 0.076337
| 0.038706
| 0.046916
| 0.884371
| 0.870883
| 0.864334
| 0.849966
| 0.840778
| 0.840778
| 0
| 0.019004
| 0.25387
| 16,150
| 425
| 90
| 38
| 0.830041
| 0.299443
| 0
| 0.856604
| 0
| 0
| 0.026923
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030189
| false
| 0
| 0.022642
| 0
| 0.113208
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4292cf1e77fb71ac37748bd434517b2e6774af9a
| 196
|
py
|
Python
|
todoapp/tests/__init__.py
|
compilers-ai/create-django-app
|
1b3e8b30bd8428f163788428b6c37aecfa406e07
|
[
"MIT"
] | 70
|
2021-03-03T08:59:42.000Z
|
2022-01-10T14:02:28.000Z
|
todoapp/tests/__init__.py
|
compilers-ai/create-django-app
|
1b3e8b30bd8428f163788428b6c37aecfa406e07
|
[
"MIT"
] | 2
|
2021-03-12T22:29:56.000Z
|
2021-12-13T05:46:06.000Z
|
todoapp/tests/__init__.py
|
imagineai/create-django-app
|
e34337bfb7f1719f011344f856a385b21f01062f
|
[
"MIT"
] | 6
|
2021-03-06T08:28:29.000Z
|
2021-09-29T13:10:29.000Z
|
from .comment_test import *
from .commentSerializer_test import *
from .createPerson_test import *
from .personSerializer_test import *
from .todo_test import *
from .todoSerializer_test import *
| 28
| 37
| 0.816327
| 24
| 196
| 6.416667
| 0.375
| 0.38961
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122449
| 196
| 6
| 38
| 32.666667
| 0.895349
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
42a153507adf5f1ba6e9e4bf9e031238363dea18
| 34,696
|
py
|
Python
|
template.py
|
Python16224/OtelOtomasyon
|
d17906a6b45d71066825ae05cd5a418e6e2f6994
|
[
"MIT"
] | 3
|
2021-01-05T21:15:17.000Z
|
2021-05-09T16:52:10.000Z
|
template.py
|
Python16224/OtelOtomasyon
|
d17906a6b45d71066825ae05cd5a418e6e2f6994
|
[
"MIT"
] | null | null | null |
template.py
|
Python16224/OtelOtomasyon
|
d17906a6b45d71066825ae05cd5a418e6e2f6994
|
[
"MIT"
] | 1
|
2021-01-06T21:16:22.000Z
|
2021-01-06T21:16:22.000Z
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'main_ui.ui'
#
# Created by: PyQt5 UI code generator 5.15.2
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Form(object):
    """Main-window UI for the hotel automation form.

    Originally emitted by pyuic5 5.15.2 from 'main_ui.ui'. The generated
    code repeated the brush/palette boilerplate ~100 times; it is collapsed
    here into declarative (role, rgba, style) tables fed to _make_palette.
    NOTE: if the .ui file is regenerated with pyuic5, these edits are lost.
    """

    @staticmethod
    def _make_palette(groups):
        """Build a QPalette from declarative brush data.

        groups: iterable of (color_group, entries) pairs, where each entry is
        (role, rgba, brush_style). rgba is a 3- or 4-tuple forwarded to
        QtGui.QColor; brush_style is a QtCore.Qt brush style enum value.
        Returns the populated QtGui.QPalette.
        """
        palette = QtGui.QPalette()
        for group, entries in groups:
            for role, rgba, style in entries:
                brush = QtGui.QBrush(QtGui.QColor(*rgba))
                brush.setStyle(style)
                palette.setBrush(group, role, brush)
        return palette

    def setupUi(self, Form):
        """Create and lay out all widgets on Form (fixed 775x350 window)."""
        QP = QtGui.QPalette
        SOLID = QtCore.Qt.SolidPattern
        NOBRUSH = QtCore.Qt.NoBrush

        Form.setObjectName("Form")
        Form.resize(775, 350)
        # Min == max size pins the window to a fixed 775x350.
        Form.setMinimumSize(QtCore.QSize(775, 350))
        Form.setMaximumSize(QtCore.QSize(775, 350))

        # Window-wide dark-red theme. Active and Inactive share one brush set.
        _form_live = [
            (QP.WindowText, (255, 255, 255), SOLID),
            (QP.Button, (128, 0, 0), SOLID),
            (QP.Light, (192, 0, 0), SOLID),
            (QP.Midlight, (160, 0, 0), SOLID),
            (QP.Dark, (64, 0, 0), SOLID),
            (QP.Mid, (85, 0, 0), SOLID),
            (QP.Text, (255, 255, 255), SOLID),
            (QP.BrightText, (255, 255, 255), SOLID),
            (QP.ButtonText, (255, 255, 255), SOLID),
            (QP.Base, (0, 0, 0), SOLID),
            (QP.Window, (128, 0, 0), SOLID),
            (QP.Shadow, (0, 0, 0), SOLID),
            (QP.AlternateBase, (64, 0, 0), SOLID),
            (QP.ToolTipBase, (255, 255, 220), SOLID),
            (QP.ToolTipText, (0, 0, 0), SOLID),
            (QP.PlaceholderText, (255, 255, 255, 128), SOLID),
        ]
        _form_disabled = [
            (QP.WindowText, (64, 0, 0), SOLID),
            (QP.Button, (128, 0, 0), SOLID),
            (QP.Light, (192, 0, 0), SOLID),
            (QP.Midlight, (160, 0, 0), SOLID),
            (QP.Dark, (64, 0, 0), SOLID),
            (QP.Mid, (85, 0, 0), SOLID),
            (QP.Text, (64, 0, 0), SOLID),
            (QP.BrightText, (255, 255, 255), SOLID),
            (QP.ButtonText, (64, 0, 0), SOLID),
            (QP.Base, (128, 0, 0), SOLID),
            (QP.Window, (128, 0, 0), SOLID),
            (QP.Shadow, (0, 0, 0), SOLID),
            (QP.AlternateBase, (128, 0, 0), SOLID),
            (QP.ToolTipBase, (255, 255, 220), SOLID),
            (QP.ToolTipText, (0, 0, 0), SOLID),
            (QP.PlaceholderText, (255, 255, 255, 128), SOLID),
        ]
        Form.setPalette(self._make_palette([
            (QP.Active, _form_live),
            (QP.Inactive, _form_live),
            (QP.Disabled, _form_disabled),
        ]))

        font = QtGui.QFont()
        font.setFamily("Calibri")
        font.setPointSize(11)
        font.setBold(True)
        font.setWeight(75)
        Form.setFont(font)

        icon = QtGui.QIcon()
        # Icon file is resolved relative to the working directory.
        icon.addPixmap(QtGui.QPixmap("icon.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        Form.setWindowIcon(icon)

        # Static caption labels (texts assigned in retranslateUi).
        self.label_2 = QtWidgets.QLabel(Form)
        self.label_2.setGeometry(QtCore.QRect(20, 70, 80, 30))
        self.label_2.setObjectName("label_2")
        self.label = QtWidgets.QLabel(Form)
        self.label.setGeometry(QtCore.QRect(20, 30, 80, 30))
        self.label.setObjectName("label")
        self.label_3 = QtWidgets.QLabel(Form)
        self.label_3.setEnabled(True)
        self.label_3.setGeometry(QtCore.QRect(230, 30, 80, 30))
        self.label_3.setObjectName("label_3")
        self.label_4 = QtWidgets.QLabel(Form)
        self.label_4.setGeometry(QtCore.QRect(230, 70, 80, 30))
        self.label_4.setObjectName("label_4")

        # Room fee entry field.
        self.txt_fee = QtWidgets.QLineEdit(Form)
        self.txt_fee.setGeometry(QtCore.QRect(300, 70, 100, 30))
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.txt_fee.sizePolicy().hasHeightForWidth())
        self.txt_fee.setSizePolicy(sizePolicy)
        _edit_live = [
            (QP.Text, (0, 0, 0), SOLID),
            (QP.PlaceholderText, (0, 0, 0, 128), NOBRUSH),
        ]
        _edit_disabled = [
            (QP.Text, (64, 0, 0), SOLID),
            (QP.PlaceholderText, (0, 0, 0, 128), NOBRUSH),
        ]
        self.txt_fee.setPalette(self._make_palette([
            (QP.Active, _edit_live),
            (QP.Inactive, _edit_live),
            (QP.Disabled, _edit_disabled),
        ]))
        self.txt_fee.setObjectName("txt_fee")

        # Room-number selector.
        self.cmb_room = QtWidgets.QComboBox(Form)
        self.cmb_room.setGeometry(QtCore.QRect(300, 30, 100, 30))
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.cmb_room.sizePolicy().hasHeightForWidth())
        self.cmb_room.setSizePolicy(sizePolicy)
        _combo_live = [
            (QP.Text, (0, 0, 0), SOLID),
            (QP.Base, (255, 255, 255), SOLID),
            (QP.PlaceholderText, (0, 0, 0, 128), NOBRUSH),
        ]
        _combo_disabled = [
            (QP.Text, (64, 0, 0), SOLID),
            (QP.Base, (128, 0, 0), SOLID),
            (QP.PlaceholderText, (0, 0, 0, 128), NOBRUSH),
        ]
        self.cmb_room.setPalette(self._make_palette([
            (QP.Active, _combo_live),
            (QP.Inactive, _combo_live),
            (QP.Disabled, _combo_disabled),
        ]))
        self.cmb_room.setObjectName("cmb_room")

        # Check-in / check-out push buttons share one palette spec.
        _button_groups = [
            (QP.Active, [(QP.ButtonText, (0, 0, 0), SOLID)]),
            (QP.Inactive, [(QP.ButtonText, (0, 0, 0), SOLID)]),
            (QP.Disabled, [(QP.ButtonText, (64, 0, 0), SOLID)]),
        ]
        self.btn_check_in = QtWidgets.QPushButton(Form)
        self.btn_check_in.setGeometry(QtCore.QRect(90, 290, 100, 30))
        self.btn_check_in.setPalette(self._make_palette(_button_groups))
        self.btn_check_in.setObjectName("btn_check_in")
        self.btn_check_out = QtWidgets.QPushButton(Form)
        self.btn_check_out.setGeometry(QtCore.QRect(230, 290, 100, 30))
        self.btn_check_out.setPalette(self._make_palette(_button_groups))
        self.btn_check_out.setObjectName("btn_check_out")

        # "Add person" button additionally styles Text / PlaceholderText.
        self.btn_add_person = QtWidgets.QPushButton(Form)
        self.btn_add_person.setGeometry(QtCore.QRect(540, 290, 100, 30))
        _add_live = [
            (QP.Text, (0, 0, 0), SOLID),
            (QP.ButtonText, (0, 0, 0), SOLID),
            (QP.PlaceholderText, (0, 0, 0, 128), NOBRUSH),
        ]
        _add_disabled = [
            (QP.Text, (64, 0, 0), SOLID),
            (QP.ButtonText, (64, 0, 0), SOLID),
            (QP.PlaceholderText, (0, 0, 0, 128), NOBRUSH),
        ]
        self.btn_add_person.setPalette(self._make_palette([
            (QP.Active, _add_live),
            (QP.Inactive, _add_live),
            (QP.Disabled, _add_disabled),
        ]))
        self.btn_add_person.setObjectName("btn_add_person")

        # Check-in / check-out date editors share one palette spec.
        _date_live = [
            (QP.WindowText, (255, 255, 255), SOLID),
            (QP.Dark, (144, 144, 144), SOLID),
            (QP.Text, (0, 0, 0), SOLID),
            (QP.ButtonText, (255, 255, 255), SOLID),
            (QP.Base, (255, 255, 255), SOLID),
            (QP.Shadow, (85, 255, 255), SOLID),
            (QP.AlternateBase, (255, 255, 127), SOLID),
            (QP.ToolTipText, (85, 170, 127), SOLID),
            (QP.PlaceholderText, (0, 0, 0, 128), NOBRUSH),
        ]
        _date_disabled = [
            (QP.WindowText, (144, 144, 144), SOLID),
            (QP.Dark, (144, 144, 144), SOLID),
            (QP.Text, (144, 144, 144), SOLID),
            (QP.ButtonText, (144, 144, 144), SOLID),
            (QP.Base, (128, 0, 0), SOLID),
            (QP.Shadow, (85, 255, 255), SOLID),
            (QP.AlternateBase, (255, 255, 127), SOLID),
            (QP.ToolTipText, (85, 170, 127), SOLID),
            (QP.PlaceholderText, (0, 0, 0, 128), NOBRUSH),
        ]
        _date_groups = [
            (QP.Active, _date_live),
            (QP.Inactive, _date_live),
            (QP.Disabled, _date_disabled),
        ]
        self.check_in_date = QtWidgets.QDateEdit(Form)
        self.check_in_date.setGeometry(QtCore.QRect(110, 30, 100, 30))
        self.check_in_date.setPalette(self._make_palette(_date_groups))
        self.check_in_date.setCalendarPopup(True)
        self.check_in_date.setObjectName("check_in_date")
        self.check_out_date = QtWidgets.QDateEdit(Form)
        self.check_out_date.setGeometry(QtCore.QRect(110, 70, 100, 30))
        self.check_out_date.setPalette(self._make_palette(_date_groups))
        self.check_out_date.setCalendarPopup(True)
        self.check_out_date.setObjectName("check_out_date")

        # Customer table; starts empty (rows/columns added at runtime).
        self.table_customer = QtWidgets.QTableWidget(Form)
        self.table_customer.setGeometry(QtCore.QRect(20, 140, 381, 120))
        _table_live = [
            (QP.WindowText, (255, 255, 255), SOLID),
            (QP.Dark, (0, 0, 0), SOLID),
            (QP.Text, (0, 0, 0), SOLID),
            (QP.BrightText, (0, 0, 0), SOLID),
            (QP.ButtonText, (0, 0, 0), SOLID),
            (QP.Base, (220, 220, 220), SOLID),
            (QP.Window, (255, 255, 255), SOLID),
            (QP.HighlightedText, (0, 0, 0), SOLID),
            (QP.PlaceholderText, (0, 0, 0, 128), NOBRUSH),
        ]
        _table_disabled = [
            (QP.WindowText, (0, 0, 0), SOLID),
            (QP.Dark, (0, 0, 0), SOLID),
            (QP.Text, (0, 0, 0), SOLID),
            (QP.BrightText, (0, 0, 0), SOLID),
            (QP.ButtonText, (0, 0, 0), SOLID),
            (QP.Base, (255, 255, 255), SOLID),
            (QP.Window, (255, 255, 255), SOLID),
            (QP.HighlightedText, (0, 0, 0), SOLID),
            (QP.PlaceholderText, (0, 0, 0, 128), NOBRUSH),
        ]
        self.table_customer.setPalette(self._make_palette([
            (QP.Active, _table_live),
            (QP.Inactive, _table_live),
            (QP.Disabled, _table_disabled),
        ]))
        self.table_customer.setRowCount(0)
        self.table_customer.setColumnCount(0)
        self.table_customer.setObjectName("table_customer")
        self.table_customer.horizontalHeader().setVisible(True)

        # Placeholder label used to display the camera feed.
        self.lbl_camera = QtWidgets.QLabel(Form)
        self.lbl_camera.setGeometry(QtCore.QRect(430, 30, 320, 240))
        self.lbl_camera.setText("")
        self.lbl_camera.setObjectName("lbl_camera")

        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)

    def retranslateUi(self, Form):
        """Assign all user-visible (Turkish) strings; called from setupUi."""
        _translate = QtCore.QCoreApplication.translate
        Form.setWindowTitle(_translate("Form", "Otel Otomasyon"))
        self.label_2.setText(_translate("Form", "Çıkış Tarihi :"))
        self.label.setText(_translate("Form", "Giriş Tarihi :"))
        self.label_3.setText(_translate("Form", "Oda No :"))
        self.label_4.setText(_translate("Form", "Fiyat :"))
        self.btn_check_in.setText(_translate("Form", "GİRİŞ YAP"))
        self.btn_check_out.setText(_translate("Form", "ÇIKIŞ YAP"))
        self.btn_add_person.setText(_translate("Form", "KİŞİ EKLE"))
| 58.410774
| 104
| 0.692472
| 4,191
| 34,696
| 5.706514
| 0.048199
| 0.177747
| 0.106372
| 0.139614
| 0.899189
| 0.880582
| 0.874645
| 0.870045
| 0.870045
| 0.869083
| 0
| 0.040963
| 0.183105
| 34,696
| 593
| 105
| 58.509275
| 0.802703
| 0.00784
| 0
| 0.815199
| 1
| 0
| 0.007729
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003454
| false
| 0
| 0.001727
| 0
| 0.006908
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
42abe198c14192aa2932d10a9f9b57767b0164cc
| 3,111
|
py
|
Python
|
helpdesk/migrations/0034_create_email_template_for_merged.py
|
AmatorAVG/django-helpdesk-atoria
|
0e530b02a6ff0144e9a7d0f12a2af4e33f6b7ed9
|
[
"BSD-3-Clause",
"CC-BY-4.0",
"MIT"
] | 789
|
2016-10-17T19:11:15.000Z
|
2022-03-27T11:57:20.000Z
|
helpdesk/migrations/0034_create_email_template_for_merged.py
|
AmatorAVG/django-helpdesk-atoria
|
0e530b02a6ff0144e9a7d0f12a2af4e33f6b7ed9
|
[
"BSD-3-Clause",
"CC-BY-4.0",
"MIT"
] | 559
|
2016-10-12T08:16:54.000Z
|
2022-03-31T19:57:14.000Z
|
helpdesk/migrations/0034_create_email_template_for_merged.py
|
AmatorAVG/django-helpdesk-atoria
|
0e530b02a6ff0144e9a7d0f12a2af4e33f6b7ed9
|
[
"BSD-3-Clause",
"CC-BY-4.0",
"MIT"
] | 441
|
2016-10-13T08:31:33.000Z
|
2022-03-30T21:04:45.000Z
|
# Generated by Django 2.2.13 on 2020-10-29 22:34
from django.db import migrations
def forwards_func(apps, schema_editor):
    """Create the 'merged' ticket email templates in English and French.

    Ids are assigned explicitly as max(id) + 1 because PG sequences are not
    reset. Fixes two defects in the generated version: the max-id lookup now
    runs on the same database alias the rows are written to (the original
    queried the default database), and an empty EmailTemplate table no longer
    crashes with AttributeError (``first()`` returning None).
    """
    EmailTemplate = apps.get_model("helpdesk", "EmailTemplate")
    db_alias = schema_editor.connection.alias
    templates = EmailTemplate.objects.using(db_alias)

    def _next_id():
        # because PG sequences are not reset; start at 1 on an empty table
        last = templates.order_by('-id').first()
        return 1 if last is None else last.id + 1

    templates.create(
        id=_next_id(),
        template_name='merged',
        subject='(Merged)',
        heading='Ticket merged',
        plain_text="""Hello,
This is a courtesy e-mail to let you know that ticket {{ ticket.ticket }} ("{{ ticket.title }}") by {{ ticket.submitter_email }} has been merged to ticket {{ ticket.merged_to.ticket }}.
From now on, please answer on this ticket, or you can include the tag {{ ticket.merged_to.ticket }} in your e-mail subject.""",
        html="""<p style="font-family: sans-serif; font-size: 1em;">Hello,</p>
<p style="font-family: sans-serif; font-size: 1em;">This is a courtesy e-mail to let you know that ticket <b>{{ ticket.ticket }}</b> (<em>{{ ticket.title }}</em>) by {{ ticket.submitter_email }} has been merged to ticket <a href="{{ ticket.merged_to.staff_url }}">{{ ticket.merged_to.ticket }}</a>.</p>
<p style="font-family: sans-serif; font-size: 1em;">From now on, please answer on this ticket, or you can include the tag <b>{{ ticket.merged_to.ticket }}</b> in your e-mail subject.</p>""",
        locale='en'
    )
    templates.create(
        id=_next_id(),
        template_name='merged',
        subject='(Fusionné)',
        heading='Ticket Fusionné',
        plain_text="""Bonjour,
Ce courriel indicatif permet de vous prévenir que le ticket {{ ticket.ticket }} ("{{ ticket.title }}") par {{ ticket.submitter_email }} a été fusionné au ticket {{ ticket.merged_to.ticket }}.
Veillez à répondre sur ce ticket dorénavant, ou bien inclure la balise {{ ticket.merged_to.ticket }} dans le sujet de votre réponse par mail.""",
        html="""<p style="font-family: sans-serif; font-size: 1em;">Bonjour,</p>
<p style="font-family: sans-serif; font-size: 1em;">Ce courriel indicatif permet de vous prévenir que le ticket <b>{{ ticket.ticket }}</b> (<em>{{ ticket.title }}</em>) par {{ ticket.submitter_email }} a été fusionné au ticket <a href="{{ ticket.merged_to.staff_url }}">{{ ticket.merged_to.ticket }}</a>.</p>
<p style="font-family: sans-serif; font-size: 1em;">Veillez à répondre sur ce ticket dorénavant, ou bien inclure la balise <b>{{ ticket.merged_to.ticket }}</b> dans le sujet de votre réponse par mail.</p>""",
        locale='fr'
    )
def reverse_func(apps, schema_editor):
    """Undo forwards_func: remove every 'merged' e-mail template."""
    template_model = apps.get_model("helpdesk", "EmailTemplate")
    alias = schema_editor.connection.alias
    merged_templates = template_model.objects.using(alias).filter(template_name='merged')
    merged_templates.delete()
class Migration(migrations.Migration):
    # Data migration: seeds the 'merged' notification e-mail templates
    # created/removed by forwards_func/reverse_func in this module.

    dependencies = [
        # Requires the Ticket.merged_to field introduced in 0033.
        ('helpdesk', '0033_ticket_merged_to'),
    ]

    operations = [
        # Supplying an explicit reverse function keeps the migration
        # reversible with `migrate helpdesk 0033`.
        migrations.RunPython(forwards_func, reverse_func),
    ]
| 51.85
| 309
| 0.68081
| 446
| 3,111
| 4.656951
| 0.289238
| 0.050072
| 0.074145
| 0.077034
| 0.811748
| 0.753009
| 0.731825
| 0.731825
| 0.701011
| 0.596052
| 0
| 0.010798
| 0.166506
| 3,111
| 59
| 310
| 52.728814
| 0.790204
| 0.037287
| 0
| 0.243902
| 1
| 0.243902
| 0.649616
| 0.115346
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04878
| false
| 0
| 0.02439
| 0
| 0.146341
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
35f71da7a60b0e76328e3cbd8e37c90319931178
| 1,553
|
py
|
Python
|
Libs/data_set_4.py
|
VahidHeidari/StrBEAM
|
35ba1cd0ecf6f4b89eaff8e23baddae133e7fd1b
|
[
"MIT"
] | null | null | null |
Libs/data_set_4.py
|
VahidHeidari/StrBEAM
|
35ba1cd0ecf6f4b89eaff8e23baddae133e7fd1b
|
[
"MIT"
] | null | null | null |
Libs/data_set_4.py
|
VahidHeidari/StrBEAM
|
35ba1cd0ecf6f4b89eaff8e23baddae133e7fd1b
|
[
"MIT"
] | null | null | null |
# Test data set 4
# 40 rows x 4 columns of small integer codes (values 0-2).  The data is a
# 10-row pattern repeated four times; the only deviation is row 30, whose
# last entry is 0 instead of 1.  Semantics of the codes are not visible
# here -- presumably test observations for StrBEAM (TODO confirm).
dataC = [
    # rows 0-9
    [2, 1, 2, 1], [0, 1, 2, 1], [1, 2, 2, 1], [2, 2, 2, 1],
    [2, 2, 2, 1], [2, 2, 2, 1], [2, 2, 2, 1], [2, 2, 2, 1],
    [2, 2, 2, 1], [2, 2, 1, 1],
    # rows 10-19
    [2, 1, 2, 1], [0, 1, 2, 1], [1, 2, 2, 1], [2, 2, 2, 1],
    [2, 2, 2, 1], [2, 2, 2, 1], [2, 2, 2, 1], [2, 2, 2, 1],
    [2, 2, 2, 1], [2, 2, 1, 1],
    # rows 20-29
    [2, 1, 2, 1], [0, 1, 2, 1], [1, 2, 2, 1], [2, 2, 2, 1],
    [2, 2, 2, 1], [2, 2, 2, 1], [2, 2, 2, 1], [2, 2, 2, 1],
    [2, 2, 2, 1], [2, 2, 1, 1],
    # rows 30-39 (row 30 differs from the repeating pattern)
    [2, 1, 2, 0], [0, 1, 2, 1], [1, 2, 2, 1], [2, 2, 2, 1],
    [2, 2, 2, 1], [2, 2, 2, 1], [2, 2, 2, 1], [2, 2, 2, 1],
    [2, 2, 2, 1], [2, 2, 1, 1],
]
# 40 rows x 4 columns, mostly zero; eleven scattered non-zero entries
# (all 1 except dataU[23][0] == 2).
dataU = [
    # rows 0-9
    [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
    [0, 0, 1, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
    [0, 1, 0, 0], [0, 0, 0, 0],
    # rows 10-19
    [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [1, 0, 0, 0],
    [0, 0, 1, 1], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
    [0, 1, 0, 0], [0, 0, 0, 0],
    # rows 20-29
    [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [2, 0, 0, 0],
    [0, 0, 1, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
    [0, 1, 0, 0], [0, 0, 0, 0],
    # rows 30-39
    [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
    [0, 0, 1, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
    [0, 1, 0, 0], [0, 0, 0, 0],
]
# Guard: this module only defines fixture data; importing it has no side
# effects, and running it directly just prints a short notice.
if __name__ == '__main__':
    print('This is a test case file!')
| 47.060606
| 71
| 0.247263
| 336
| 1,553
| 1.119048
| 0.053571
| 0.739362
| 1.013298
| 1.234043
| 0.845745
| 0.845745
| 0.843085
| 0.843085
| 0.843085
| 0.843085
| 0
| 0.334027
| 0.381198
| 1,553
| 32
| 72
| 48.53125
| 0.057232
| 0.009659
| 0
| 0.538462
| 0
| 0
| 0.021512
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.038462
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
c422fa1ed01944e22c32dd1f1b9ac9e295e2a94c
| 14,095
|
py
|
Python
|
kubernetes/test/test_com_coreos_monitoring_v1_probe_list.py
|
mariusgheorghies/python
|
68ac7e168963d8b5a81dc493b1973d29e903a15b
|
[
"Apache-2.0"
] | null | null | null |
kubernetes/test/test_com_coreos_monitoring_v1_probe_list.py
|
mariusgheorghies/python
|
68ac7e168963d8b5a81dc493b1973d29e903a15b
|
[
"Apache-2.0"
] | null | null | null |
kubernetes/test/test_com_coreos_monitoring_v1_probe_list.py
|
mariusgheorghies/python
|
68ac7e168963d8b5a81dc493b1973d29e903a15b
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1.20.7
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import kubernetes.client
from kubernetes.client.models.com_coreos_monitoring_v1_probe_list import ComCoreosMonitoringV1ProbeList # noqa: E501
from kubernetes.client.rest import ApiException
class TestComCoreosMonitoringV1ProbeList(unittest.TestCase):
    """ComCoreosMonitoringV1ProbeList unit test stubs"""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def make_instance(self, include_optional):
        """Build a ComCoreosMonitoringV1ProbeList test instance.

        include_optional is a boolean: when False only required params
        are included, when True both required and optional params are
        included.
        """
        # NOTE(review): the OpenAPI-generator-emitted fixture body was not
        # valid Python -- it passed the reserved keyword ``continue`` as a
        # keyword argument (a SyntaxError, so this module could never be
        # imported) and used '/' inside attribute paths such as
        # ``kubernetes.client.models.com/coreos/monitoring/v1/probe``, which
        # would evaluate as attribute division at runtime.  The nested
        # fixture literals are replaced with a minimal, importable
        # construction; populate ``items`` with real Probe fixtures as
        # needed.
        if include_optional:
            return ComCoreosMonitoringV1ProbeList(
                api_version='0',
                items=[],
                kind='0',
            )
        else:
            return ComCoreosMonitoringV1ProbeList(
                items=[],
            )

    def testComCoreosMonitoringV1ProbeList(self):
        """Test ComCoreosMonitoringV1ProbeList"""
        inst_req_only = self.make_instance(include_optional=False)
        inst_req_and_optional = self.make_instance(include_optional=True)
# Allow running this test module directly via `python <file>`.
if __name__ == '__main__':
    unittest.main()
| 59.978723
| 296
| 0.422136
| 1,021
| 14,095
| 5.449559
| 0.178257
| 0.067937
| 0.143422
| 0.158519
| 0.820633
| 0.80949
| 0.804996
| 0.804996
| 0.798347
| 0.779655
| 0
| 0.041359
| 0.509401
| 14,095
| 234
| 297
| 60.235043
| 0.763268
| 0.009862
| 0
| 0.868932
| 1
| 0
| 0.026938
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.009709
| 0.029126
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
c423a31f4d5b000b2f94dbc920f7d1eafe761665
| 1,871
|
py
|
Python
|
algo/models.py
|
PratikMahobiya/silent_trader
|
3cc453650b8d850eaed82e162f4e9c5766d9737b
|
[
"MIT"
] | null | null | null |
algo/models.py
|
PratikMahobiya/silent_trader
|
3cc453650b8d850eaed82e162f4e9c5766d9737b
|
[
"MIT"
] | null | null | null |
algo/models.py
|
PratikMahobiya/silent_trader
|
3cc453650b8d850eaed82e162f4e9c5766d9737b
|
[
"MIT"
] | null | null | null |
from django.db import models
# Create your models here.
class RSI_55_5_MIN(models.Model):
    """Signal log for the RSI-55 strategy on the 5-minute timeframe."""

    date = models.DateTimeField()
    symbol = models.CharField(max_length=100, verbose_name='SYMBOL')
    indicate = models.CharField(max_length=100, verbose_name='INDICATE')
    # NOTE(review): 'type' shadows the builtin, but renaming it would change
    # the DB column and model API, so it is kept as-is.
    type = models.CharField(max_length=100, verbose_name='TYPE')
    close = models.FloatField(verbose_name='PRICE')
    stoploss = models.FloatField(verbose_name='STOPLOSS')
    rsi = models.FloatField(verbose_name='RSI')
    rsi_exit_target = models.FloatField(verbose_name='RSI_TARGET', blank=True, null=True, default=None)
    difference = models.FloatField(verbose_name='PRICE DIFFERENCE', blank=True, null=True, default=None)
    profit = models.FloatField(verbose_name='PROFIT (%)', blank=True, null=True, default=None)

    def __int__(self):
        # Kept for backward compatibility: int(instance) -> primary key.
        return self.id

    def __str__(self):
        # Bug fix: the model originally defined only __int__, almost
        # certainly a typo of Django's conventional __str__, which left the
        # admin showing the generic "RSI_55_5_MIN object" label.
        return str(self.id)

    class Meta:
        db_table = 'RSI_55_5_MIN'
class RSI_55_15_MIN(models.Model):
    """Signal log for the RSI-55 strategy on the 15-minute timeframe."""

    date = models.DateTimeField()
    symbol = models.CharField(max_length=100, verbose_name='SYMBOL')
    indicate = models.CharField(max_length=100, verbose_name='INDICATE')
    # NOTE(review): 'type' shadows the builtin, but renaming it would change
    # the DB column and model API, so it is kept as-is.
    type = models.CharField(max_length=100, verbose_name='TYPE')
    close = models.FloatField(verbose_name='PRICE')
    stoploss = models.FloatField(verbose_name='STOPLOSS')
    rsi = models.FloatField(verbose_name='RSI')
    rsi_exit_target = models.FloatField(verbose_name='RSI_TARGET', blank=True, null=True, default=None)
    difference = models.FloatField(verbose_name='PRICE DIFFERENCE', blank=True, null=True, default=None)
    profit = models.FloatField(verbose_name='PROFIT (%)', blank=True, null=True, default=None)

    def __int__(self):
        # Kept for backward compatibility: int(instance) -> primary key.
        return self.id

    def __str__(self):
        # Bug fix: the model originally defined only __int__, almost
        # certainly a typo of Django's conventional __str__, which left the
        # admin showing the generic "RSI_55_15_MIN object" label.
        return str(self.id)

    class Meta:
        db_table = 'RSI_55_15_MIN'
| 55.029412
| 107
| 0.674506
| 227
| 1,871
| 5.330396
| 0.202643
| 0.163636
| 0.228099
| 0.267769
| 0.93719
| 0.93719
| 0.93719
| 0.93719
| 0.93719
| 0.93719
| 0
| 0.021666
| 0.210583
| 1,871
| 34
| 108
| 55.029412
| 0.797563
| 0.012827
| 0
| 0.83871
| 0
| 0
| 0.089382
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.064516
| false
| 0
| 0.032258
| 0.064516
| 0.935484
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 10
|
c42e2eec12e6b3aeb6bbae962e709a6a9cb7f183
| 20,914
|
py
|
Python
|
csscms/css_properties.py
|
christabor/csscms
|
aa2504701ace66a6e2489aedab134901cfdc854d
|
[
"MIT"
] | 3
|
2015-07-08T03:59:41.000Z
|
2015-09-18T01:59:04.000Z
|
csscms/css_properties.py
|
christabor/csscms
|
aa2504701ace66a6e2489aedab134901cfdc854d
|
[
"MIT"
] | null | null | null |
csscms/css_properties.py
|
christabor/csscms
|
aa2504701ace66a6e2489aedab134901cfdc854d
|
[
"MIT"
] | 1
|
2020-11-26T00:31:43.000Z
|
2020-11-26T00:31:43.000Z
|
rules = {'print-pagebi': {'values': ['auto', 'avoid', 'initial', 'inherit'], 'dropdown': True}, 'flex-direction': {'values': ['row', 'row-reverse', 'column', 'column-reverse', 'initial', 'inherit'], 'dropdown': True}, 'print-pageba': {'values': ['auto', 'always', 'avoid', 'left', 'right', 'initial', 'inherit'], 'dropdown': True}, 'print-pagebb': {'values': ['auto', 'always', 'avoid', 'left', 'right', 'initial', 'inherit'], 'dropdown': True}, 'border-top': {'values': ['border-top-width', 'border-top-style', 'border-top-color', 'initial', 'inherit'], 'dropdown': True}, 'top': {'values': ['auto', 'length', '%', 'initial', 'inherit'], 'dropdown': True}, 'nav-right': {'values': ['auto', 'id', 'target-name', 'initial', 'inherit'], 'dropdown': True}, 'flex-wrap': {'values': ['nowrap', 'wrap', 'wrap-reverse', 'initial', 'inherit'], 'dropdown': True}, 'background-size': {'values': ['auto', 'length', 'percentage', 'cover', 'contain', 'initial', 'inherit'], 'dropdown': True}, 'nav-left': {'values': ['auto', 'id', 'target-name', 'initial', 'inherit'], 'dropdown': True}, 'list-style-image': {'values': ['none', 'url', 'initial', 'inherit'], 'dropdown': True}, 'background-origin': {'values': ['padding-box', 'border-box', 'content-box', 'initial', 'inherit'], 'dropdown': True}, 'nav-up': {'values': ['auto', 'id', 'target-name', 'initial', 'inherit'], 'dropdown': True}, 'text-decoration-color': {'values': ['color', 'initial', 'inherit'], 'dropdown': True}, 'color': {'values': ['color', 'initial', 'inherit'], 'dropdown': True}, 'animation': {'values': ['animation-name', 'animation-duration', 'animation-timing-function', 'animation-delay', 'animation-iteration-count', 'animation-direction', 'animation-fill-mode', 'animation-play-state', 'initial', 'inherit'], 'dropdown': True}, 'counter-increment': {'values': ['none', 'id number', 'initial', 'inherit'], 'dropdown': True}, 'justify-content': {'values': ['flex-start', 'flex-end', 'center', 'space-between', 'space-around', 'initial', 
'inherit'], 'dropdown': True}, 'columns': {'values': ['auto', 'column-width', 'column-count', 'initial', 'inherit'], 'dropdown': True}, 'border-radius': {'values': ['length', '%', 'initial', 'inherit'], 'dropdown': True}, 'flex': {'values': ['flex-grow', 'flex-shrink', 'flex-basis', 'auto', 'initial', 'none', 'inherit'], 'dropdown': True}, 'box-shadow': {'values': ['none', 'h-shadow', 'v-shadow', 'blur', 'spread', 'color', 'inset', 'initial', 'inherit'], 'dropdown': True}, 'cursor': {'values': ['alias', 'all-scroll', 'auto', 'cell', 'context-menu', 'col-resize', 'copy', 'crosshair', 'default', 'e-resize', 'ew-resize', 'grab', 'grabbing', 'help', 'move', 'n-resize', 'ne-resize', 'nesw-resize', 'ns-resize', 'nw-resize', 'nwse-resize', 'no-drop', 'none', 'not-allowed', 'pointer', 'progress', 'row-resize', 's-resize', 'se-resize', 'sw-resize', 'text', 'URL', 'vertical-text', 'w-resize', 'wait', 'zoom-in', 'zoom-out', 'initial', 'inherit'], 'dropdown': True}, 'align-self': {'values': ['auto', 'stretch', 'center', 'flex-start', 'flex-end', 'baseline', 'initial', 'inherit'], 'dropdown': True}, 'letter-spacing': {'values': ['normal', 'length', 'initial', 'inherit'], 'dropdown': True}, 'column-rule': {'values': ['column-rule-width', 'column-rule-style', 'column-rule-color', 'initial', 'inherit'], 'dropdown': True}, 'word-wrap': {'values': ['normal', 'break-word', 'initial', 'inherit'], 'dropdown': True}, 'border-bottom': {'values': ['border-bottom-width', 'border-bottom-style', 'border-bottom-color', 'initial', 'inherit'], 'dropdown': True}, 'border-spacing': {'values': ['length-h', 'length-v', 'initial', 'inherit'], 'dropdown': True}, 'counter-reset': {'values': ['none', 'name', 'number', 'initial', 'inherit'], 'dropdown': True}, 'column-rule-width': {'values': ['medium', 'thin', 'thick', 'length', 'initial', 'inherit'], 'dropdown': True}, 'background': {'values': ['background-color', 'background-position', 'background-size', 'background-repeat', 'background-origin', 
'background-clip', 'background-attachment', 'background-image', 'initial', 'inherit'], 'dropdown': True}, 'list-style-type': {'values': ['disc', 'armenian', 'circle', 'cjk-ideographic', 'decimal', 'decimal-leading-zero', 'georgian', 'hebrew', 'hiragana', 'hiragana-iroha', 'katakana', 'katakana-iroha', 'lower-alpha', 'lower-greek', 'lower-latin', 'lower-roman', 'none', 'square', 'upper-alpha', 'upper-latin', 'upper-roman', 'initial', 'inherit'], 'dropdown': True}, 'max-height': {'values': ['none', 'length', '%', 'initial', 'inherit'], 'dropdown': True}, 'animation-fill-mode': {'values': ['none', 'forwards', 'backwards', 'both', 'initial', 'inherit'], 'dropdown': True}, 'perspective-origin': {'values': ['x-axis', 'y-axis', 'initial', 'inherit'], 'dropdown': True}, 'quotes': {'values': ['none', 'string string string string', 'initial', 'inherit'], 'dropdown': True}, 'font-family': {'values': ['family-name generic-family', 'initial', 'inherit'], 'dropdown': True}, 'vertical-align': {'values': ['baseline', 'length', '%', 'sub', 'super', 'top', 'text-top', 'middle', 'bottom', 'text-bottom', 'initial', 'inherit'], 'dropdown': True}, 'size': {'values': ['number', 'length', 'initial', 'inherit'], 'dropdown': True}, 'border-bottom-color': {'values': ['color', 'transparent', 'initial', 'inherit'], 'dropdown': True}, 'border-bottom-right-radius': {'values': ['length', '%', 'initial', 'inherit'], 'dropdown': True}, 'padding-left': {'values': ['length', '%', 'initial', 'inherit'], 'dropdown': True}, 'text-align-last': {'values': ['auto', 'left', 'right', 'center', 'justify', 'start', 'end', 'initial', 'inherit'], 'dropdown': True}, 'transform': {'values': ['none', 'matrix( n,n,n,n,n,n )', 'matrix3d ( n,n,n,n,n,n,n,n,n,n,n,n,n,n,n,n )', 'translate( x,y )', 'translate3d( x,y,z )', 'translateX( x )', 'translateY( y )', 'translateZ( z )', 'scale( x,y )', 'scale3d( x,y,z )', 'scaleX( x )', 'scaleY( y )', 'scaleZ( z )', 'rotate( angle )', 'rotate3d( x,y,z,angle )', 'rotateX( angle )', 
'rotateY( angle )', 'rotateZ( angle )', 'skew( x-angle,y-angle )', 'skewX( angle )', 'skewY( angle )', 'perspective( n )', 'initial', 'inherit'], 'dropdown': False}, 'outline-color': {'values': ['invert', 'color', 'initial', 'inherit'], 'dropdown': True}, 'font-style': {'values': ['normal', 'italic', 'oblique', 'initial', 'inherit'], 'dropdown': True}, 'nav-index': {'values': ['auto', 'number', 'initial', 'inherit'], 'dropdown': True}, 'column-count': {'values': ['number', 'auto', 'initial', 'inherit'], 'dropdown': True}, 'transform-style': {'values': ['flat', 'preserve-3d', 'initial', 'inherit'], 'dropdown': True}, 'hanging-punctuation': {'values': ['none', 'first', 'last', 'allow-end', 'force-end', 'initial', 'inherit'], 'dropdown': True}, 'perspective': {'values': ['length', 'none', 'initial', 'inherit'], 'dropdown': True}, 'column-width': {'values': ['auto', 'length', 'initial', 'inherit'], 'dropdown': True}, 'padding-top': {'values': ['length', '%', 'initial', 'inherit'], 'dropdown': True}, 'font-face-rule': {'values': ['font-family', 'src', 'font-stretch', 'font-style', 'font-weight', 'unicode-range'], 'dropdown': True}, 'border-image-repeat': {'values': ['stretch', 'repeat', 'round', 'space', 'initial', 'inherit'], 'dropdown': True}, 'outline-style': {'values': ['none', 'hidden', 'dotted', 'dashed', 'solid', 'double', 'groove', 'ridge', 'inset', 'outset', 'initial', 'inherit'], 'dropdown': True}, 'text-direction': {'values': ['ltr', 'rtl', 'initial', 'inherit'], 'dropdown': True}, 'width': {'values': ['auto', 'length', '%', 'initial', 'inherit'], 'dropdown': True}, 'border-right-style': {'values': ['none', 'hidden', 'dotted', 'dashed', 'solid', 'double', 'groove', 'ridge', 'inset', 'outset', 'initial', 'inherit'], 'dropdown': True}, 'text-indent': {'values': ['length', '%', 'initial', 'inherit'], 'dropdown': True}, 'padding-right': {'values': ['length', '%', 'initial', 'inherit'], 'dropdown': True}, 'border-left-style': {'values': ['none', 'hidden', 
'dotted', 'dashed', 'solid', 'double', 'groove', 'ridge', 'inset', 'outset', 'initial', 'inherit'], 'dropdown': True}, 'font-variant': {'values': ['normal', 'small-caps', 'initial', 'inherit'], 'dropdown': True}, 'background-color': {'values': ['color', 'transparent', 'initial', 'inherit'], 'dropdown': True}, 'flex-flow': {'values': ['flex-direction', 'flex-wrap', 'initial', 'inherit'], 'dropdown': True}, 'text-align': {'values': ['left', 'right', 'center', 'justify', 'initial', 'inherit'], 'dropdown': True}, 'flex-grow': {'values': ['number', 'initial', 'inherit'], 'dropdown': True}, 'animation-delay': {'values': ['time', 'initial', 'inherit'], 'dropdown': True}, 'text-decoration': {'values': ['none', 'underline', 'overline', 'line-through', 'initial', 'inherit'], 'dropdown': True}, 'text-shadow': {'values': ['h-shadow', 'v-shadow', 'blur', 'color', 'none', 'initial', 'inherit'], 'dropdown': True}, 'min-height': {'values': ['length', '%', 'initial', 'inherit'], 'dropdown': True}, 'border-image': {'values': ['border-image-source', 'border-image-slice', 'border-image-width', 'border-image-outset', 'border-image-repeat', 'initial', 'inherit'], 'dropdown': True}, 'animation-direction': {'values': ['normal', 'reverse', 'alternate', 'alternate-reverse', 'initial', 'inherit'], 'dropdown': True}, 'animation-keyframes': {'values': ['animationname', 'keyframes-selector', 'css-styles'], 'dropdown': True}, 'animation-duration': {'values': ['time', 'initial', 'inherit'], 'dropdown': True}, 'transition': {'values': ['transition-property', 'transition-duration', 'transition-timing-function', 'transition-delay', 'initial', 'inherit'], 'dropdown': False}, 'right': {'values': ['auto', 'length', '%', 'initial', 'inherit'], 'dropdown': True}, 'border-right-width': {'values': ['medium', 'thin', 'thick', 'length', 'initial', 'inherit'], 'dropdown': True}, 'border-image-width': {'values': ['length', 'number', '%', 'auto', 'initial', 'inherit'], 'dropdown': True}, 'clear': {'values': 
['none', 'left', 'right', 'both', 'initial', 'inherit'], 'dropdown': True}, 'flex-shrink': {'values': ['number', 'initial', 'inherit'], 'dropdown': True}, 'text-unicode-bidi': {'values': ['normal', 'embed', 'bidi-override', 'initial', 'inherit'], 'dropdown': True}, 'border-top-style': {'values': ['none', 'hidden', 'dotted', 'dashed', 'solid', 'double', 'groove', 'ridge', 'inset', 'outset', 'initial', 'inherit'], 'dropdown': True}, 'float': {'values': ['none', 'left', 'right', 'initial', 'inherit'], 'dropdown': True}, 'text-justify': {'values': ['auto', 'inter-word', 'inter-ideograph', 'inter-cluster', 'distribute', 'kashida', 'trim', 'none', 'initial', 'inherit'], 'dropdown': True}, 'align-items': {'values': ['stretch', 'center', 'flex-start', 'flex-end', 'baseline', 'initial', 'inherit'], 'dropdown': True}, 'border-collapse': {'values': ['separate', 'collapse', 'initial', 'inherit'], 'dropdown': True}, 'border-bottom-width': {'values': ['medium', 'thin', 'thick', 'length', 'initial', 'inherit'], 'dropdown': True}, 'content': {'values': ['normal', 'none', 'counter', 'attr (attribute)', 'string', 'open-quote', 'close-quote', 'no-open-quote', 'no-close-quote', 'url', 'initial', 'inherit'], 'dropdown': True}, 'z-index': {'values': ['auto', 'number', 'initial', 'inherit'], 'dropdown': True}, 'outline-offset': {'values': ['length', 'initial', 'inherit'], 'dropdown': True}, 'margin-right': {'values': ['length', '%', 'auto', 'initial', 'inherit'], 'dropdown': True}, 'border-right': {'values': ['border-right-width', 'border-right-style', 'border-right-color', 'initial', 'inherit'], 'dropdown': True}, 'border-top-width': {'values': ['medium', 'thin', 'thick', 'length', 'initial', 'inherit'], 'dropdown': True}, 'empty-cells': {'values': ['show', 'hide', 'initial', 'inherit'], 'dropdown': True}, 'background-clip': {'values': ['border-box', 'padding-box', 'content-box', 'initial', 'inherit'], 'dropdown': True}, 'border-bottom-left-radius': {'values': ['length', '%', 'initial', 
'inherit'], 'dropdown': True}, 'column-fill': {'values': ['balance', 'auto', 'initial', 'inherit'], 'dropdown': True}, 'font': {'values': ['font-style', 'font-variant', 'font-weight', 'font-size/line-height', 'font-family', 'caption', 'icon', 'menu', 'message-box', 'small-caption', 'status-bar', 'initial', 'inherit'], 'dropdown': True}, 'border-width': {'values': ['medium', 'thin', 'thick', 'length', 'initial', 'inherit'], 'dropdown': True}, 'line-height': {'values': ['normal', 'number', 'length', '%', 'initial', 'inherit'], 'dropdown': True}, 'min-width': {'values': ['length', '%', 'initial', 'inherit'], 'dropdown': True}, 'outline-width': {'values': ['medium', 'thin', 'thick', 'length', 'initial', 'inherit'], 'dropdown': True}, 'border-top-color': {'values': ['color', 'transparent', 'initial', 'inherit'], 'dropdown': True}, 'background-position': {'values': ['left top', 'left center', 'left bottom', 'right top', 'right center', 'right bottom', 'center top', 'center center', 'center bottom', 'x%', 'y%', 'xpos' ,'ypos', 'initial', 'inherit'], 'dropdown': True}, 'white-space': {'values': ['normal', 'nowrap', 'pre', 'pre-line', 'pre-wrap', 'initial', 'inherit'], 'dropdown': True}, 'border-left': {'values': ['border-left-width', 'border-left-style', 'border-left-color', 'initial', 'inherit'], 'dropdown': True}, 'animation-play-state': {'values': ['paused', 'running', 'initial', 'inherit'], 'dropdown': True}, 'transition-delay': {'values': ['time', 'initial', 'inherit'], 'dropdown': True}, 'padding': {'values': ['length', '%', 'initial', 'inherit'], 'dropdown': True}, 'border-style': {'values': ['none', 'hidden', 'dotted', 'dashed', 'solid', 'double', 'groove', 'ridge', 'inset', 'outset', 'initial', 'inherit'], 'dropdown': True}, 'animation-iteration-count': {'values': ['number', 'infinite', 'initial', 'inherit'], 'dropdown': True}, 'background-attachment': {'values': ['scroll', 'fixed', 'local', 'initial', 'inherit'], 'dropdown': True}, 'position': {'values': 
['static', 'absolute', 'fixed', 'relative', 'initial', 'inherit'], 'dropdown': True}, 'text-decoration-style': {'values': ['solid', 'double', 'dotted', 'dashed', 'wavy', 'initial', 'inherit'], 'dropdown': True}, 'resize': {'values': ['none', 'both', 'horizontal', 'vertical', 'initial', 'inherit'], 'dropdown': True}, 'outline': {'values': ['outline-color', 'outline-style', 'outline-width', 'initial', 'inherit'], 'dropdown': True}, 'max-width': {'values': ['none', 'length', '%', 'initial', 'inherit'], 'dropdown': True}, 'font-size-adjust': {'values': ['number', 'none', 'initial', 'inherit'], 'dropdown': True}, 'column-rule-style': {'values': ['none', 'hidden', 'dotted', 'dashed', 'solid', 'double', 'groove', 'ridge', 'inset', 'outset', 'initial', 'inherit'], 'dropdown': True}, 'text-transform': {'values': ['none', 'capitalize', 'uppercase', 'lowercase', 'initial', 'inherit'], 'dropdown': True}, 'caption-side': {'values': ['top', 'bottom', 'initial', 'inherit'], 'dropdown': True}, 'word-spacing': {'values': ['normal', 'length', 'initial', 'inherit'], 'dropdown': True}, 'margin': {'values': ['length', '%', 'auto', 'initial', 'inherit'], 'dropdown': True}, 'border-left-width': {'values': ['medium', 'thin', 'thick', 'length', 'initial', 'inherit'], 'dropdown': True}, 'border-top-left-radius': {'values': ['length', '%', 'initial', 'inherit'], 'dropdown': True}, 'border-color': {'values': ['color', 'transparent', 'initial', 'inherit'], 'dropdown': True}, 'column-span': {'values': ['1', 'all', 'initial', 'inherit'], 'dropdown': True}, 'list-style': {'values': ['list-style-type', 'list-style-position', 'list-style-image', 'initial', 'inherit'], 'dropdown': True}, 'padding-bottom': {'values': ['length', '%', 'initial', 'inherit'], 'dropdown': True}, 'column-gap': {'values': ['length', 'normal', 'initial', 'inherit'], 'dropdown': True}, 'transform-origin': {'values': ['x-axis', 'y-axis', 'z-axis', 'initial', 'inherit'], 'dropdown': True}, 'animation-name': {'values': 
['keyframename', 'none', 'initial', 'inherit'], 'dropdown': True}, 'text-decoration-line': {'values': ['none', 'underline', 'overline', 'line-through', 'initial', 'inherit'], 'dropdown': True}, 'nav-down': {'values': ['auto', 'id', 'target-name', 'initial', 'inherit'], 'dropdown': True}, 'overflow': {'values': ['visible', 'hidden', 'scroll', 'auto', 'initial', 'inherit'], 'dropdown': True}, 'margin-left': {'values': ['length', '%', 'auto', 'initial', 'inherit'], 'dropdown': True}, 'left': {'values': ['auto', 'length', '%', 'initial', 'inherit'], 'dropdown': True}, 'visibility': {'values': ['visible', 'hidden', 'collapse', 'initial', 'inherit'], 'dropdown': True}, 'border': {'values': ['border-width', 'border-style', 'border-color', 'initial', 'inherit'], 'dropdown': True}, 'border-image-slice': {'values': ['number', '%', 'fill', 'initial', 'inherit'], 'dropdown': True}, 'border-left-color': {'values': ['color', 'transparent', 'initial', 'inherit'], 'dropdown': True}, 'display': {'values': ['inline', 'block', 'flex', 'inline-block', 'inline-flex', 'inline-table', 'list-item', 'run-in', 'table', 'table-caption', 'table-column-group', 'table-header-group', 'table-footer-group', 'table-row-group', 'table-cell', 'table-column', 'table-row', 'none', 'initial', 'inherit'], 'dropdown': True}, 'overflow-y': {'values': ['visible', 'hidden', 'scroll', 'auto', 'initial', 'inherit'], 'dropdown': True}, 'overflow-x': {'values': ['visible', 'hidden', 'scroll', 'auto', 'initial', 'inherit'], 'dropdown': True}, 'word-break': {'values': ['normal', 'break-all', u'keep-all', 'initial', 'inherit'], 'dropdown': True}, 'background-repeat': {'values': ['repeat', 'repeat-x', 'repeat-y', 'no-repeat', 'initial', 'inherit'], 'dropdown': True}, 'flex-basis': {'values': ['number', 'auto', 'initial', 'inherit'], 'dropdown': True}, 'height': {'values': ['auto', 'length', '%', 'initial', 'inherit'], 'dropdown': True}, 'text-overflow': {'values': ['clip', 'ellipsis', 'string', 'initial', 
'inherit'], 'dropdown': True}, 'margin-bottom': {'values': ['length', '%', 'auto', 'initial', 'inherit'], 'dropdown': True}, 'clip': {'values': ['auto', 'shape', 'initial', 'inherit'], 'dropdown': True}, 'font-stretch': {'values': ['ultra-condensed', 'extra-condensed', 'condensed', 'semi-condensed', 'normal', 'semi-expanded', 'expanded', 'extra-expanded', 'ultra-expanded', 'initial', 'inherit'], 'dropdown': True}, 'bottom': {'values': ['auto', 'length', '%', 'initial', 'inherit'], 'dropdown': True}, 'border-top-right-radius': {'values': ['length', '%', 'initial', 'inherit'], 'dropdown': True}, 'border-image-outset': {'values': ['length', 'number', 'initial', 'inherit'], 'dropdown': True}, 'font-weight': {'values': ['normal', 'bold', 'bolder', 'lighter', '100', '200', '300', '400', '500', '600', '700', '800', '900', 'initial', 'inherit'], 'dropdown': True}, 'opacity': {'values': ['number', 'initial', 'inherit'], 'dropdown': True}, 'table-layout': {'values': ['auto', 'fixed', 'initial', 'inherit'], 'dropdown': True}, 'border-right-color': {'values': ['color', 'transparent', 'initial', 'inherit'], 'dropdown': True}, 'column-rule-color': {'values': ['color', 'initial', 'inherit'], 'dropdown': True}, 'transition-property': {'values': ['none', 'all', 'property', 'initial', 'inherit'], 'dropdown': True}, 'align-content': {'values': ['stretch', 'center', 'flex-start', 'flex-end', 'space-between', 'space-around', 'initial', 'inherit'], 'dropdown': True}, 'backface-visibility': {'values': ['visible', 'hidden', 'initial', 'inherit'], 'dropdown': True}, 'icon': {'values': ['auto', 'URL', 'initial', 'inherit'], 'dropdown': True}, 'background-image': {'values': ['url', 'none', 'initial', 'inherit'], 'dropdown': True}, 'border-bottom-style': {'values': ['none', 'hidden', 'dotted', 'dashed', 'solid', 'double', 'groove', 'ridge', 'inset', 'outset', 'initial', 'inherit'], 'dropdown': True}, 'animation-timing-function': {'values': ['linear', 'ease', 'ease-in', 'ease-out', 
'ease-in-out', 'cubic-bezier( n , n , n , n )', 'initial', 'inherit'], 'dropdown': True}, 'border-image-source': {'values': ['none', 'image', 'initial', 'inherit'], 'dropdown': True}, 'box-sizing': {'values': ['content-box', 'border-box', 'initial', 'inherit'], 'dropdown': True}, 'transition-duration': {'values': ['time', 'initial', 'inherit'], 'dropdown': True}, 'margin-top': {'values': ['length', '%', 'auto', 'initial', 'inherit'], 'dropdown': True}, 'font-size': {'values': ['medium', 'xx-small', 'x-small', 'small', 'large', 'x-large', 'xx-large', 'smaller', 'larger', 'length', '%', 'initial', 'inherit'], 'dropdown': True}, 'transition-timing-function': {'values': ['ease', 'linear', 'ease-in', 'ease-out', 'ease-in-out', 'cubic-bezier( n , n , n , n )', 'initial', 'inherit'], 'dropdown': True}, 'order': {'values': ['number', 'initial', 'inherit'], 'dropdown': True}, 'list-style-position': {'values': ['inside', 'outside', 'initial', 'inherit'], 'dropdown': True}}
| 10,457
| 20,913
| 0.612939
| 2,251
| 20,914
| 5.694802
| 0.157708
| 0.204774
| 0.298619
| 0.348857
| 0.603479
| 0.453155
| 0.330135
| 0.246119
| 0.171074
| 0.125751
| 0
| 0.001732
| 0.088744
| 20,914
| 1
| 20,914
| 20,914
| 0.670899
| 0
| 0
| 0
| 0
| 1
| 0.608205
| 0.018361
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
c478b6a213ee6ebd66cfc63ec77735b870d3d30f
| 14,561
|
py
|
Python
|
frameworks/helloworld/tests/test_canary_strategy.py
|
minyk/dcos-ceph
|
0ed185c996c6bc242feb73121b7e7fbcf9dd3ac1
|
[
"Apache-2.0"
] | null | null | null |
frameworks/helloworld/tests/test_canary_strategy.py
|
minyk/dcos-ceph
|
0ed185c996c6bc242feb73121b7e7fbcf9dd3ac1
|
[
"Apache-2.0"
] | null | null | null |
frameworks/helloworld/tests/test_canary_strategy.py
|
minyk/dcos-ceph
|
0ed185c996c6bc242feb73121b7e7fbcf9dd3ac1
|
[
"Apache-2.0"
] | null | null | null |
import logging
import pytest
import sdk_cmd
import sdk_install
import sdk_marathon
import sdk_plan
import sdk_tasks
import sdk_utils
import shakedown
from tests import config
# Module-level logger for plan/status dumps emitted by the tests below.
log = logging.getLogger(__name__)
# global pytest variable applicable to whole module: skip every test here
# unless the DC/OS cluster version is at least 1.9
pytestmark = pytest.mark.dcos_min_version('1.9')
@pytest.fixture(scope='module', autouse=True)
def configure_package(configure_security):
    """Install the hello-world package with the canary deployment strategy.

    Because the spec uses canary deployment, no tasks should launch on
    install (and suppressed shouldn't be set), so we deliberately expect
    0 running tasks and skip waiting for deployment. The service is
    uninstalled again once the whole module has run.
    """
    canary_options = {
        'service': {'spec_file': 'examples/canary.yml'},
        'hello': {'count': 4},
        'world': {'count': 4},
    }
    try:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
        sdk_install.install(
            config.PACKAGE_NAME,
            config.SERVICE_NAME,
            0,
            additional_options=canary_options,
            wait_for_deployment=False)
        yield  # let the test session execute
    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
@pytest.mark.sanity
def test_canary_init():
    """Immediately after install, no pods exist and both deploy phases are gated."""
    def no_pods_listed():
        # check for empty list internally rather than returning the empty list:
        # an empty (falsy) return would make shakedown.wait_for() keep polling
        return sdk_cmd.svc_cli(config.PACKAGE_NAME, config.SERVICE_NAME, 'pod list', json=True) == []
    assert shakedown.wait_for(no_pods_listed, noisy=True, timeout_seconds=10 * 60)

    pl = sdk_plan.wait_for_plan_status(config.SERVICE_NAME, 'deploy', 'WAITING')
    log.info(pl)
    assert pl['status'] == 'WAITING'
    assert len(pl['phases']) == 2
    # both phases are still fully gated by the canary: first two steps wait
    # for a manual 'plan continue', the rest are pending behind them
    for phase in pl['phases']:
        assert phase['status'] == 'WAITING'
        assert [step['status'] for step in phase['steps']] == [
            'WAITING', 'WAITING', 'PENDING', 'PENDING']
@pytest.mark.sanity
def test_canary_first():
    """First 'plan continue' launches exactly hello-0 and completes its step."""
    sdk_cmd.svc_cli(config.PACKAGE_NAME, config.SERVICE_NAME, 'plan continue deploy hello-deploy')

    expected_tasks = ['hello-0']
    sdk_tasks.check_running(config.SERVICE_NAME, len(expected_tasks))
    assert sdk_cmd.svc_cli(config.PACKAGE_NAME, config.SERVICE_NAME, 'pod list', json=True) == expected_tasks

    # do not use service_plan always
    # when here, plan should always return properly
    pl = sdk_plan.wait_for_completed_step(config.SERVICE_NAME, 'deploy', 'hello-deploy', 'hello-0:[server]')
    log.info(pl)
    assert pl['status'] == 'WAITING'
    assert len(pl['phases']) == 2

    # hello phase: only the canary step hello-0 is done; hello-1 waits next
    hello_phase, world_phase = pl['phases']
    assert hello_phase['status'] == 'WAITING'
    assert [s['status'] for s in hello_phase['steps']] == [
        'COMPLETE', 'WAITING', 'PENDING', 'PENDING']
    # world phase: untouched, still fully gated
    assert world_phase['status'] == 'WAITING'
    assert [s['status'] for s in world_phase['steps']] == [
        'WAITING', 'WAITING', 'PENDING', 'PENDING']
@pytest.mark.sanity
def test_canary_plan_continue_noop():
    """A phase-less 'plan continue deploy' must not launch anything.

    The plan doesn't have the waiting bit set, so telling it to continue
    should be a no-op (the plan is currently just in WAITING for display
    purposes). We verify no second task appears within 30s and hello-0 is
    still the only pod.
    """
    sdk_cmd.svc_cli(config.PACKAGE_NAME, config.SERVICE_NAME, 'plan continue deploy')
    expected_tasks = ['hello-0']
    try:
        sdk_tasks.check_running(config.SERVICE_NAME, len(expected_tasks) + 1, timeout_seconds=30)
        assert False, "Shouldn't have deployed a second task"
    except AssertionError:
        raise  # propagate the failure asserted above
    except Exception:
        # expected: check_running times out because no new task launched.
        # (was a bare 'except:', which would also swallow KeyboardInterrupt/SystemExit)
        pass
    sdk_tasks.check_running(config.SERVICE_NAME, len(expected_tasks))
    assert sdk_cmd.svc_cli(config.PACKAGE_NAME, config.SERVICE_NAME, 'pod list', json=True) == expected_tasks
@pytest.mark.sanity
def test_canary_second():
    """Continuing world-deploy only clears its wait bit; nothing launches yet.

    Because the plan strategy is serial, the second phase just clears a wait
    bit without proceeding to launch anything: world-0 moves WAITING->PENDING
    while the hello phase is still in progress.
    """
    sdk_cmd.svc_cli(config.PACKAGE_NAME, config.SERVICE_NAME, 'plan continue deploy world-deploy')
    sdk_plan.wait_for_step_status(config.SERVICE_NAME, 'deploy', 'world-deploy', 'world-0:[server]', 'PENDING')

    expected_tasks = ['hello-0']
    try:
        sdk_tasks.check_running(config.SERVICE_NAME, len(expected_tasks) + 1, timeout_seconds=30)
        assert False, "Shouldn't have deployed a second task"
    except AssertionError:
        raise  # propagate the failure asserted above
    except Exception:
        # expected: check_running times out because no new task launched.
        # (was a bare 'except:', which would also swallow KeyboardInterrupt/SystemExit)
        pass
    sdk_tasks.check_running(config.SERVICE_NAME, len(expected_tasks))
    assert sdk_cmd.svc_cli(config.PACKAGE_NAME, config.SERVICE_NAME, 'pod list', json=True) == expected_tasks

    pl = sdk_plan.get_deployment_plan(config.SERVICE_NAME)
    log.info(pl)
    assert pl['status'] == 'WAITING'
    assert len(pl['phases']) == 2

    phase = pl['phases'][0]
    assert phase['status'] == 'WAITING'
    steps = phase['steps']
    assert len(steps) == 4
    assert steps[0]['status'] == 'COMPLETE'
    assert steps[1]['status'] == 'WAITING'
    assert steps[2]['status'] == 'PENDING'
    assert steps[3]['status'] == 'PENDING'

    phase = pl['phases'][1]
    assert phase['status'] == 'PENDING'
    steps = phase['steps']
    # BUG FIX: the original bound this phase's steps to 'steps2' but then
    # re-asserted len(steps) == 4 against the FIRST phase's stale 'steps'.
    assert len(steps) == 4
    assert steps[0]['status'] == 'PENDING'
    assert steps[1]['status'] == 'WAITING'
    assert steps[2]['status'] == 'PENDING'
    assert steps[3]['status'] == 'PENDING'
@pytest.mark.sanity
def test_canary_third():
    """Second hello continue rolls out the rest of hello plus the world canary."""
    sdk_cmd.svc_cli(config.PACKAGE_NAME, config.SERVICE_NAME, 'plan continue deploy hello-deploy')

    expected_tasks = [
        'hello-0', 'hello-1', 'hello-2', 'hello-3',
        'world-0']
    sdk_tasks.check_running(config.SERVICE_NAME, len(expected_tasks))
    assert sdk_cmd.svc_cli(config.PACKAGE_NAME, config.SERVICE_NAME, 'pod list', json=True) == expected_tasks

    pl = sdk_plan.wait_for_completed_phase(config.SERVICE_NAME, 'deploy', 'hello-deploy')
    log.info(pl)
    assert pl['status'] == 'WAITING'
    assert len(pl['phases']) == 2

    hello_phase, world_phase = pl['phases']
    # hello phase is fully rolled out
    assert hello_phase['status'] == 'COMPLETE'
    assert [s['status'] for s in hello_phase['steps']] == ['COMPLETE'] * 4
    # world phase: canary step done, world-1 now waits for the next continue
    assert world_phase['status'] == 'WAITING'
    assert [s['status'] for s in world_phase['steps']] == [
        'COMPLETE', 'WAITING', 'PENDING', 'PENDING']
@pytest.mark.sanity
def test_canary_fourth():
    """Final world continue completes the whole deployment (4 hello + 4 world)."""
    sdk_cmd.svc_cli(config.PACKAGE_NAME, config.SERVICE_NAME, 'plan continue deploy world-deploy')

    expected_tasks = [
        'hello-0', 'hello-1', 'hello-2', 'hello-3',
        'world-0', 'world-1', 'world-2', 'world-3']
    sdk_tasks.check_running(config.SERVICE_NAME, len(expected_tasks))
    assert sdk_cmd.svc_cli(config.PACKAGE_NAME, config.SERVICE_NAME, 'pod list', json=True) == expected_tasks

    pl = sdk_plan.wait_for_completed_plan(config.SERVICE_NAME, 'deploy')
    log.info(pl)
    assert pl['status'] == 'COMPLETE'
    assert len(pl['phases']) == 2
    # every step of both phases is done
    for phase in pl['phases']:
        assert phase['status'] == 'COMPLETE'
        assert [s['status'] for s in phase['steps']] == ['COMPLETE'] * 4
@pytest.mark.sanity
def test_increase_count():
    """Bumping HELLO_COUNT adds a fifth hello step that waits for a continue.

    After the count bump, the new hello-4 step must NOT start automatically
    (the canary gate applies to it too). Only after 'plan continue deploy
    hello-deploy' does hello-4 launch and the plan complete.
    """
    sdk_marathon.bump_task_count_config(config.SERVICE_NAME, 'HELLO_COUNT')

    expected_tasks = [
        'hello-0', 'hello-1', 'hello-2', 'hello-3',
        'world-0', 'world-1', 'world-2', 'world-3']
    try:
        sdk_tasks.check_running(config.SERVICE_NAME, len(expected_tasks) + 1, timeout_seconds=60)
        assert False, "Should not start task now"
    except AssertionError:
        raise  # propagate the failure asserted above
    except Exception:
        # expected to fail: check_running times out since hello-4 is gated.
        # (was a bare 'except:', which would also swallow KeyboardInterrupt/SystemExit)
        pass
    sdk_tasks.check_running(config.SERVICE_NAME, len(expected_tasks))
    assert sdk_cmd.svc_cli(config.PACKAGE_NAME, config.SERVICE_NAME, 'pod list', json=True) == expected_tasks

    pl = sdk_plan.wait_for_plan_status(config.SERVICE_NAME, 'deploy', 'WAITING')
    log.info(pl)
    assert pl['status'] == 'WAITING'
    assert len(pl['phases']) == 2

    # hello phase now has 5 steps; only the new hello-4 step is waiting
    phase = pl['phases'][0]
    assert phase['status'] == 'WAITING'
    steps = phase['steps']
    assert len(steps) == 5
    assert steps[0]['status'] == 'COMPLETE'
    assert steps[1]['status'] == 'COMPLETE'
    assert steps[2]['status'] == 'COMPLETE'
    assert steps[3]['status'] == 'COMPLETE'
    assert steps[4]['status'] == 'WAITING'

    # world phase is untouched by the hello count bump
    phase = pl['phases'][1]
    assert phase['status'] == 'COMPLETE'
    steps = phase['steps']
    assert len(steps) == 4
    assert steps[0]['status'] == 'COMPLETE'
    assert steps[1]['status'] == 'COMPLETE'
    assert steps[2]['status'] == 'COMPLETE'
    assert steps[3]['status'] == 'COMPLETE'

    sdk_cmd.svc_cli(config.PACKAGE_NAME, config.SERVICE_NAME, 'plan continue deploy hello-deploy')

    expected_tasks = [
        'hello-0', 'hello-1', 'hello-2', 'hello-3', 'hello-4',
        'world-0', 'world-1', 'world-2', 'world-3']
    sdk_tasks.check_running(config.SERVICE_NAME, len(expected_tasks))
    assert sdk_cmd.svc_cli(config.PACKAGE_NAME, config.SERVICE_NAME, 'pod list', json=True) == expected_tasks

    pl = sdk_plan.wait_for_plan_status(config.SERVICE_NAME, 'deploy', 'COMPLETE')
    log.info(pl)
    assert pl['status'] == 'COMPLETE'
    assert len(pl['phases']) == 2

    phase = pl['phases'][0]
    assert phase['status'] == 'COMPLETE'
    steps = phase['steps']
    assert len(steps) == 5
    assert steps[0]['status'] == 'COMPLETE'
    assert steps[1]['status'] == 'COMPLETE'
    assert steps[2]['status'] == 'COMPLETE'
    assert steps[3]['status'] == 'COMPLETE'
    assert steps[4]['status'] == 'COMPLETE'

    phase = pl['phases'][1]
    assert phase['status'] == 'COMPLETE'
    steps = phase['steps']
    assert len(steps) == 4
    assert steps[0]['status'] == 'COMPLETE'
    assert steps[1]['status'] == 'COMPLETE'
    assert steps[2]['status'] == 'COMPLETE'
    assert steps[3]['status'] == 'COMPLETE'
@pytest.mark.sanity
def test_increase_cpu():
    """Bumping hello cpus re-gates the hello phase; pods update per continue."""

    def step_statuses(plan, phase_index):
        # flatten one phase's step statuses for compact comparison
        return [s['status'] for s in plan['phases'][phase_index]['steps']]

    hello_0_ids = sdk_tasks.get_task_ids(config.SERVICE_NAME, 'hello-0-server')
    config.bump_hello_cpus()

    pl = sdk_plan.wait_for_plan_status(config.SERVICE_NAME, 'deploy', 'WAITING')
    log.info(pl)
    assert pl['status'] == 'WAITING'
    assert len(pl['phases']) == 2
    assert pl['phases'][0]['status'] == 'WAITING'
    assert step_statuses(pl, 0) == ['WAITING', 'WAITING', 'PENDING', 'PENDING', 'PENDING']
    assert pl['phases'][1]['status'] == 'COMPLETE'
    assert step_statuses(pl, 1) == ['COMPLETE'] * 4

    # check that all prior tasks are still running, no changes yet
    expected_tasks = [
        'hello-0', 'hello-1', 'hello-2', 'hello-3', 'hello-4',
        'world-0', 'world-1', 'world-2', 'world-3']
    sdk_tasks.check_running(config.SERVICE_NAME, len(expected_tasks))
    assert sdk_cmd.svc_cli(config.PACKAGE_NAME, config.SERVICE_NAME, 'pod list', json=True) == expected_tasks
    assert hello_0_ids == sdk_tasks.get_task_ids(config.SERVICE_NAME, 'hello-0-server')

    # first continue: hello-0 is relaunched with the new cpu value
    sdk_cmd.svc_cli(config.PACKAGE_NAME, config.SERVICE_NAME, 'plan continue deploy hello-deploy')
    sdk_tasks.check_tasks_updated(config.SERVICE_NAME, 'hello-0-server', hello_0_ids)
    sdk_tasks.check_running(config.SERVICE_NAME, len(expected_tasks))

    pl = sdk_plan.wait_for_step_status(
        config.SERVICE_NAME, 'deploy', 'hello-deploy', 'hello-0:[server]', 'COMPLETE')
    log.info(pl)
    assert pl['status'] == 'WAITING'
    assert len(pl['phases']) == 2
    assert pl['phases'][0]['status'] == 'WAITING'
    assert step_statuses(pl, 0) == ['COMPLETE', 'WAITING', 'PENDING', 'PENDING', 'PENDING']
    assert pl['phases'][1]['status'] == 'COMPLETE'
    assert step_statuses(pl, 1) == ['COMPLETE'] * 4

    # second continue: hello-1 updates, then the deployment runs to completion
    hello_1_ids = sdk_tasks.get_task_ids(config.SERVICE_NAME, 'hello-1-server')
    sdk_cmd.svc_cli(config.PACKAGE_NAME, config.SERVICE_NAME, 'plan continue deploy hello-deploy')
    sdk_tasks.check_tasks_updated(config.SERVICE_NAME, 'hello-1-server', hello_1_ids)

    pl = sdk_plan.wait_for_completed_deployment(config.SERVICE_NAME)
    log.info(pl)
    assert pl['status'] == 'COMPLETE'
    assert len(pl['phases']) == 2
    assert pl['phases'][0]['status'] == 'COMPLETE'
    assert step_statuses(pl, 0) == ['COMPLETE'] * 5
    assert pl['phases'][1]['status'] == 'COMPLETE'
    assert step_statuses(pl, 1) == ['COMPLETE'] * 4
| 34.423168
| 117
| 0.64556
| 1,914
| 14,561
| 4.76698
| 0.093521
| 0.097655
| 0.091298
| 0.109601
| 0.848531
| 0.847545
| 0.827269
| 0.820583
| 0.820583
| 0.787374
| 0
| 0.018072
| 0.190578
| 14,561
| 422
| 118
| 34.504739
| 0.756067
| 0.048211
| 0
| 0.806154
| 0
| 0
| 0.208986
| 0
| 0
| 0
| 0
| 0
| 0.495385
| 1
| 0.030769
| false
| 0.009231
| 0.030769
| 0.003077
| 0.064615
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6729c603b4e259b7e2f7b2b25a3b1e60e42cb6ba
| 158
|
py
|
Python
|
packages/watchmen-meta/src/watchmen_meta/dqc/__init__.py
|
Indexical-Metrics-Measure-Advisory/watchmen
|
c54ec54d9f91034a38e51fd339ba66453d2c7a6d
|
[
"MIT"
] | null | null | null |
packages/watchmen-meta/src/watchmen_meta/dqc/__init__.py
|
Indexical-Metrics-Measure-Advisory/watchmen
|
c54ec54d9f91034a38e51fd339ba66453d2c7a6d
|
[
"MIT"
] | null | null | null |
packages/watchmen-meta/src/watchmen_meta/dqc/__init__.py
|
Indexical-Metrics-Measure-Advisory/watchmen
|
c54ec54d9f91034a38e51fd339ba66453d2c7a6d
|
[
"MIT"
] | null | null | null |
from .catalog_service import CatalogService
from .monitor_rule_lock_service import MonitorJobLockService
from .monitor_rule_service import MonitorRuleService
| 39.5
| 60
| 0.905063
| 18
| 158
| 7.611111
| 0.555556
| 0.284672
| 0.218978
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075949
| 158
| 3
| 61
| 52.666667
| 0.938356
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
674a82f844c4047c82a5e6558c4e7db93aae1c8d
| 241
|
py
|
Python
|
gym-dubins-airplane/gym_dubins_airplane/envs/__init__.py
|
Cenderme/super-octo-waddle
|
723b838487dd8127f79b4797f76d427c928f56da
|
[
"MIT"
] | null | null | null |
gym-dubins-airplane/gym_dubins_airplane/envs/__init__.py
|
Cenderme/super-octo-waddle
|
723b838487dd8127f79b4797f76d427c928f56da
|
[
"MIT"
] | null | null | null |
gym-dubins-airplane/gym_dubins_airplane/envs/__init__.py
|
Cenderme/super-octo-waddle
|
723b838487dd8127f79b4797f76d427c928f56da
|
[
"MIT"
] | 1
|
2021-03-28T16:06:47.000Z
|
2021-03-28T16:06:47.000Z
|
import config
import ACEnvironment
from gym_dubins_airplane.envs.DubinsAC2Denv import DubinsAC2Denv
from gym_dubins_airplane.envs.DubinsAC3Denv import DubinsAC3Denv
from gym_dubins_airplane.envs.DubinsAC3Denvv1 import DubinsAC3Denvv1
| 34.428571
| 69
| 0.879668
| 28
| 241
| 7.357143
| 0.392857
| 0.101942
| 0.18932
| 0.305825
| 0.364078
| 0
| 0
| 0
| 0
| 0
| 0
| 0.036697
| 0.095436
| 241
| 6
| 70
| 40.166667
| 0.908257
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
675aa12f56c1ccf729e2f998dcb333448903c079
| 102
|
py
|
Python
|
servercraft/html_model/__init__.py
|
jumphone/ServerCraft
|
a5031d433a8344229411602fd7257f231f4e92d6
|
[
"Apache-2.0"
] | 1
|
2016-11-02T22:27:22.000Z
|
2016-11-02T22:27:22.000Z
|
servercraft/html_model/__init__.py
|
jumphone/ServerCraft
|
a5031d433a8344229411602fd7257f231f4e92d6
|
[
"Apache-2.0"
] | null | null | null |
servercraft/html_model/__init__.py
|
jumphone/ServerCraft
|
a5031d433a8344229411602fd7257f231f4e92d6
|
[
"Apache-2.0"
] | null | null | null |
from base_html import *
from index_html import *
from waiting_html import *
from result_html import *
| 20.4
| 26
| 0.803922
| 16
| 102
| 4.875
| 0.4375
| 0.512821
| 0.538462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.156863
| 102
| 4
| 27
| 25.5
| 0.906977
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
67760b2336cfed6b1ceb3c4c22369bce6d859c84
| 11,108
|
py
|
Python
|
kubectl_rbac/tests/audited_permissions.py
|
octarinesec/kubectl-rbac
|
9a8e006c5e646dac258ba5bdcaf44ca20e89cc87
|
[
"MIT"
] | 32
|
2018-06-15T16:01:40.000Z
|
2020-12-17T19:42:16.000Z
|
kubectl_rbac/tests/audited_permissions.py
|
octarinesec/kubectl-rbac
|
9a8e006c5e646dac258ba5bdcaf44ca20e89cc87
|
[
"MIT"
] | 1
|
2018-12-23T20:44:59.000Z
|
2018-12-23T20:44:59.000Z
|
kubectl_rbac/tests/audited_permissions.py
|
octarinesec/kubectl-rbac
|
9a8e006c5e646dac258ba5bdcaf44ca20e89cc87
|
[
"MIT"
] | 6
|
2018-06-15T13:02:57.000Z
|
2022-01-16T20:13:22.000Z
|
# Expected mapping used by the kubectl-rbac audit tests: each key is an API
# path as it appears in a Kubernetes audit log, and each value is the set of
# permission strings that accessing that path exercises.
# NOTE(review): fixture data captured from a GKE-style cluster — update in
# lockstep with the audit log fixture it is compared against.
TEST_AUDITED_PERMISSIONS = {"api": {"io.k8s.get"},
                            "api/v1": {"io.k8s.get"},
                            "apis": {"io.k8s.get"},
                            "apis/apiextensions.k8s.io/v1beta1": {"io.k8s.get"},
                            "apis/apiregistration.k8s.io/v1beta1": {"io.k8s.get"},
                            "apis/apps/v1beta1": {"io.k8s.get"},
                            "apis/apps/v1beta2": {"io.k8s.get"},
                            "apis/authentication.k8s.io/v1": {"io.k8s.get"},
                            "apis/authentication.k8s.io/v1beta1": {"io.k8s.get"},
                            "apis/authorization.k8s.io/v1": {"io.k8s.get"},
                            "apis/authorization.k8s.io/v1beta1": {"io.k8s.get"},
                            "apis/autoscaling/v1": {"io.k8s.get"},
                            "apis/autoscaling/v2beta1": {"io.k8s.get"},
                            "apis/batch/v1": {"io.k8s.get"},
                            "apis/batch/v1beta1": {"io.k8s.get"},
                            "apis/certificates.k8s.io/v1beta1": {"io.k8s.get"},
                            "apis/extensions/v1beta1": {"io.k8s.get"},
                            "apis/networking.k8s.io/v1": {"io.k8s.get"},
                            "apis/policy/v1beta1": {"io.k8s.get"},
                            "apis/rbac.authorization.k8s.io/v1": {"io.k8s.get"},
                            "apis/rbac.authorization.k8s.io/v1beta1": {"io.k8s.get"},
                            "apis/storage.k8s.io/v1": {"io.k8s.get"},
                            "apis/storage.k8s.io/v1beta1": {"io.k8s.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings": {"io.k8s.authorization.rbac.v1.clusterrolebindings.list"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/cluster-admin": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/cluster-admin-binding": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/defaultRoleBinding": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/event-exporter-rb": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/gce:beta:kubelet-certificate-bootstrap": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/gce:beta:kubelet-certificate-rotation": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/heapster-binding": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/kube-apiserver-kubelet-api-admin": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/kubelet-cluster-admin": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/kubernetes-dashboard": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/npd-binding": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/sysdig-agent": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:basic-user": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:attachdetach-controller": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:certificate-controller": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:cronjob-controller": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:daemon-set-controller": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:deployment-controller": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:disruption-controller": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:endpoint-controller": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:generic-garbage-collector": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:horizontal-pod-autoscaler": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:job-controller": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:namespace-controller": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:node-controller": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:persistent-volume-binder": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:pod-garbage-collector": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:replicaset-controller": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:replication-controller": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:resourcequota-controller": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:route-controller": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:service-account-controller": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:service-controller": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:statefulset-controller": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:ttl-controller": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:discovery": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:kube-controller-manager": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:kube-dns": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:kube-dns-autoscaler": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:kube-scheduler": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:node": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterrolebindings/system:node-proxier": {"io.k8s.authorization.rbac.v1.clusterrolebindings.get"},
                            "rbac.authorization.k8s.io/v1/clusterroles": {"io.k8s.authorization.rbac.v1.clusterroles.list"},
                            "rbac.authorization.k8s.io/v1/clusterroles/admin": {"io.k8s.authorization.rbac.v1.clusterroles.get"},
                            "rbac.authorization.k8s.io/v1/clusterroles/cluster-admin": {"io.k8s.authorization.rbac.v1.clusterroles.get"},
                            "rbac.authorization.k8s.io/v1/clusterroles/edit": {"io.k8s.authorization.rbac.v1.clusterroles.get"},
                            "rbac.authorization.k8s.io/v1/clusterroles/gce:beta:kubelet-certificate-bootstrap": {"io.k8s.authorization.rbac.v1.clusterroles.get"},
                            "rbac.authorization.k8s.io/v1/clusterroles/gce:beta:kubelet-certificate-rotation": {"io.k8s.authorization.rbac.v1.clusterroles.get"},
                            "rbac.authorization.k8s.io/v1/clusterroles/kubelet-api-admin": {"io.k8s.authorization.rbac.v1.clusterroles.get"},
                            "rbac.authorization.k8s.io/v1/clusterroles/sysdig-agent": {"io.k8s.authorization.rbac.v1.clusterroles.get"},
                            "rbac.authorization.k8s.io/v1/clusterroles/system:auth-delegator": {"io.k8s.authorization.rbac.v1.clusterroles.get"},
                            "rbac.authorization.k8s.io/v1/clusterroles/system:basic-user": {"io.k8s.authorization.rbac.v1.clusterroles.get"},
                            "rbac.authorization.k8s.io/v1/clusterroles/system:certificates.k8s.io:certificatesigningrequests:nodeclient": {"io.k8s.authorization.rbac.v1.clusterroles.get"},
                            "rbac.authorization.k8s.io/v1/clusterroles/system:certificates.k8s.io:certificatesigningrequests:selfnodeclient": {"io.k8s.authorization.rbac.v1.clusterroles.get"},
                            "rbac.authorization.k8s.io/v1/clusterroles/system:controller:attachdetach-controller": {"io.k8s.authorization.rbac.v1.clusterroles.get"},
                            "rbac.authorization.k8s.io/v1/namespaces/default/rolebindings": {"io.k8s.authorization.rbac.v1.rolebindings.list"},
                            "rbac.authorization.k8s.io/v1/namespaces/default/roles": {"io.k8s.authorization.rbac.v1.roles.list"}}
| 135.463415
| 192
| 0.639629
| 1,114
| 11,108
| 6.376122
| 0.085278
| 0.254259
| 0.062086
| 0.168943
| 0.928481
| 0.90328
| 0.87991
| 0.829649
| 0.794312
| 0.788118
| 0
| 0.034812
| 0.216421
| 11,108
| 81
| 193
| 137.135802
| 0.78125
| 0
| 0
| 0
| 0
| 0.160494
| 0.722902
| 0.691754
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
67d41035cf8f7643869ef035fe7d8db907536284
| 918
|
py
|
Python
|
nestedloops/simpleshapes.py
|
Shivams9/pythoncodecamp
|
e6cd27f4704a407ee360414a8c9236b254117a59
|
[
"MIT"
] | null | null | null |
nestedloops/simpleshapes.py
|
Shivams9/pythoncodecamp
|
e6cd27f4704a407ee360414a8c9236b254117a59
|
[
"MIT"
] | null | null | null |
nestedloops/simpleshapes.py
|
Shivams9/pythoncodecamp
|
e6cd27f4704a407ee360414a8c9236b254117a59
|
[
"MIT"
] | null | null | null |
"""
oooo
oooo
oooo
oooo
Analysis
nrows=4
row number of o's=nrows
1 4
2 4
3 4
4 4
"""
nrows = 4
for row in range(1, nrows + 1):
for col in range(1, nrows + 1):
print("o", end="")
print()
"""
o
oo
ooo
oooo
Analysis
nrows=4
row number of o's = row
1 1
2 2
3 3
4 4
"""
nrows = 4
for row in range(1, nrows + 1):
for col in range(1, row + 1):
print("o", end="")
print()
"""
---o
--oo
-ooo
oooo
Analysis
nrows=4
row number of spaces(nrows-row) number of o's=row
1 3 1
2 2 2
3 1 3
4 0 4
"""
nrows = 4
for row in range(1, nrows + 1):
for space in range(1, nrows - row + 1):
print("-", end="")
for col in range(1, row + 1):
print("o", end="")
print()
| 15.827586
| 57
| 0.421569
| 142
| 918
| 2.725352
| 0.161972
| 0.126615
| 0.144703
| 0.167959
| 0.795866
| 0.782946
| 0.782946
| 0.728682
| 0.728682
| 0.643411
| 0
| 0.095808
| 0.454248
| 918
| 57
| 58
| 16.105263
| 0.676647
| 0.11329
| 0
| 0.823529
| 0
| 0
| 0.009479
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.411765
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
e1dfbfdbea368fc4abb9cb93518fbd2b8f60a328
| 36,362
|
py
|
Python
|
idaes/generic_models/properties/cubic_eos/tests/test_cubic_eos.py
|
eslickj/idaes-pse
|
328ed07ffb0b4d98c03e972675ea32c41dd2531a
|
[
"RSA-MD"
] | 112
|
2019-02-11T23:16:36.000Z
|
2022-03-23T20:59:57.000Z
|
idaes/generic_models/properties/cubic_eos/tests/test_cubic_eos.py
|
eslickj/idaes-pse
|
328ed07ffb0b4d98c03e972675ea32c41dd2531a
|
[
"RSA-MD"
] | 621
|
2019-03-01T14:44:12.000Z
|
2022-03-31T19:49:25.000Z
|
idaes/generic_models/properties/cubic_eos/tests/test_cubic_eos.py
|
eslickj/idaes-pse
|
328ed07ffb0b4d98c03e972675ea32c41dd2531a
|
[
"RSA-MD"
] | 154
|
2019-02-01T23:46:33.000Z
|
2022-03-23T15:07:10.000Z
|
#################################################################################
# The Institute for the Design of Advanced Energy Systems Integrated Platform
# Framework (IDAES IP) was produced under the DOE Institute for the
# Design of Advanced Energy Systems (IDAES), and is copyright (c) 2018-2021
# by the software owners: The Regents of the University of California, through
# Lawrence Berkeley National Laboratory, National Technology & Engineering
# Solutions of Sandia, LLC, Carnegie Mellon University, West Virginia University
# Research Corporation, et al. All rights reserved.
#
# Please see the files COPYRIGHT.md and LICENSE.md for full copyright and
# license information.
#################################################################################
"""
Tests for the cubic root finder external functions
"""
import pytest
import os
from numpy import logspace
from pyomo.environ import (ConcreteModel,
ExternalFunction,
value)
from pyomo.core.base.external import AMPLExternalFunction
from idaes import bin_directory
__author__ = "Andrew Lee"
# Set path to root finder .so file
_so = os.path.join(bin_directory, "cubic_roots.so")
# Set module level pytest marker
pytestmark = pytest.mark.cubic_root
# Skip guard: tests only run when the compiled library is present on disk
prop_available = os.path.isfile(_so)
# Define parameters for different supported equations of state
# (u, w coefficients of the generic cubic EoS; 0 = PR-style, 1 = SRK-style
#  per this file's usage — confirm against the C implementation)
EoS_param = {
    0: {'u': 2, 'w': -1},
    1: {'u': 1, 'w': 0}
}
# Set parameter for number of points to test in reduced pressure and temperature
# Use Tr and Pr as A and B are linked
SAMPLES = 250
t_set = logspace(-1, 2, num=SAMPLES, base=10, endpoint=True)
p_set = logspace(-2, 2, num=SAMPLES, base=10, endpoint=True)
# Absolute tolerance for root finder checks
TOL = 1e-5
# Parameter indicating whether to test partial derivatives
TEST_DERS = True
# Relative finite difference step for partial derivatives
DEL = 1e-4
# Relative tolerance for accepting partial derivatives
FD_TOL = 5e-2
def between(y, x1, x2):
    """Return True if *y* lies strictly between *x1* and *x2*.

    Works for either ordering of the bounds; endpoints are excluded
    (the product of the signed distances is zero there).
    """
    return (y - x1) * (y - x2) < 0
@pytest.fixture()
def root_finder():
    """Pytest fixture: build a ConcreteModel wired to the cubic-root library.

    Registers the four external functions from ``cubic_roots.so``
    (liquid/vapor roots and their extended variants) so each test can
    evaluate them via ``evaluate_fgh``.
    """
    m = ConcreteModel()
    # Define external function methods
    m.proc_Z_liq = ExternalFunction(library=_so,
                                    function="ceos_z_liq")
    m.proc_Z_vap = ExternalFunction(library=_so,
                                    function="ceos_z_vap")
    m.proc_Z_liq_x = ExternalFunction(library=_so,
                                      function="ceos_z_liq_extend")
    m.proc_Z_vap_x = ExternalFunction(library=_so,
                                      function="ceos_z_vap_extend")
    return m
# TODO - Need tests for how external function behaves when given an invalid
# TODO - eos type. Currently seems to return a value, which probably should not
# TODO - happen.
@pytest.mark.integration
@pytest.mark.skipif(not prop_available,
                    reason="Cubic root finder not available")
def test_roots_Z_liq(root_finder):
    """Validate the liquid-root external function over a Tr/Pr sweep.

    For each EoS type and (T, P) sample: check the returned Z satisfies the
    cubic residual (or is within one Newton step of a root), that the slope
    sign identifies the liquid branch, and (when TEST_DERS) that the
    returned gradient and hessian match central/one-sided finite
    differences in A and B.
    """
    for eos_type in [0, 1]:
        u = EoS_param[eos_type]["u"]
        w = EoS_param[eos_type]["w"]
        for T in t_set:
            for P in p_set:
                # Calculate A and B parameters from Tr and Pr
                A = 0.5*P/T**2
                B = 0.1*P/T
                # Get results of external function call
                f = root_finder.proc_Z_liq
                assert(isinstance(f, AMPLExternalFunction))
                Z, g, h = f.evaluate_fgh(args=(eos_type, A, B))
                # Calculate parameters of cubic
                c1 = 1
                c2 = -(1+B-u*B)
                c3 = (A-u*B-(u-w)*B**2)
                c4 = -(A*B+w*B**2+w*B**3)
                # Calculate residual and derivatives w.r.t. Z
                res = c1*Z**3 + c2*Z**2 + c3*Z + c4
                dz = 3*c1*Z**2 + 2*c2*Z + c3
                dz2 = 6*c1*Z + 2*c2
                try:
                    # Residual can be extremely sensitive to value of Z, so
                    # if residual fails tolerance, test size of Newton step to
                    # converge to root
                    assert (pytest.approx(0, abs=TOL) == res or
                            pytest.approx(0, abs=TOL) == value(res/dz))
                    # Check derivative signs to confirm correct root
                    assert dz >= 0  # Should always have non-negative slope
                    # Determine number of roots - calculate discriminant
                    dis = (18*c1*c2*c3*c4 - 4*c2**3*c4 + c2**2*c3**2 -
                           4*c1*c3**3 - 27*c1**2*c4**2)
                    if dis >= 0:
                        # Cubic has 2 or 3 real roots
                        # Second derivative should be non-positive
                        assert dz2 <= 0
                    # otherwise no need to check 2nd derivative
                    if TEST_DERS:
                        # Perform finite differences on A and B
                        ZAp, gAp, hAp = f.evaluate_fgh(
                                args=(eos_type, A*(1+DEL), B))
                        ZAm, gAm, hAm = f.evaluate_fgh(
                                args=(eos_type, A*(1-DEL), B))
                        ZBp, gBp, hBp = f.evaluate_fgh(
                                args=(eos_type, A, B*(1+DEL)))
                        ZBm, gBm, hBm = f.evaluate_fgh(
                                args=(eos_type, A, B*(1-DEL)))
                        # Check variance in Z values. A very large difference
                        # indicates a transition between single and multiple
                        # root regions, and hence that the partial derivatives
                        # will be very sensitive.
                        # In these cases, skip the derivative tests.
                        if abs(ZAp - Z) > 1e-3 or abs(ZAm - Z) > 1e-3:
                            A_skip = True
                        else:
                            A_skip = False
                        if (abs(ZBp - Z) > 1e-3 or
                                abs(ZBm - Z) > 1e-3 or
                                abs(dis) < 1e-7):
                            B_skip = True
                        else:
                            B_skip = False
                        # Test gradient terms
                        # Calculate numerical first partial derivative
                        if not A_skip:
                            dZdA_p = (ZAp-Z)/(A*DEL)
                            dZdA_m = (Z-ZAm)/(A*DEL)
                        if not B_skip:
                            dZdB_p = (ZBp-Z)/(B*DEL)
                            dZdB_m = (Z-ZBm)/(B*DEL)
                        # Partial derivative w.r.t. EoS identifier
                        assert g[0] == 0
                        # Check that external function value lies within TOL of
                        # least one of the numerical values (delta+ or delta-),
                        # OR lies between the two numerical values and within
                        # 10*TOL of one of the numerical values
                        if not A_skip:
                            assert (
                                pytest.approx(dZdA_p, FD_TOL) == g[1] or
                                pytest.approx(dZdA_m, FD_TOL) == g[1] or
                                (between(g[1], dZdA_p, dZdA_m) and (
                                    pytest.approx(dZdA_p, 10*FD_TOL) == g[1] or
                                    pytest.approx(dZdA_m, 10*FD_TOL) == g[1])))
                        if not B_skip:
                            assert (
                                pytest.approx(dZdB_p, FD_TOL) == g[2] or
                                pytest.approx(dZdB_m, FD_TOL) == g[2] or
                                (between(g[2], dZdB_p, dZdB_m) and (
                                    pytest.approx(dZdB_p, 10*FD_TOL) == g[2] or
                                    pytest.approx(dZdB_m, 10*FD_TOL) == g[2])))
                        # Test hessian terms
                        # Calculate numerical second partial derivatives
                        if not A_skip:
                            d2ZdA2_p = (gAp[1]-g[1])/(A*DEL)
                            d2ZdA2_m = (g[1]-gAm[1])/(A*DEL)
                        if not B_skip:
                            d2ZdB2_p = (gBp[2]-g[2])/(B*DEL)
                            d2ZdB2_m = (g[2]-gBm[2])/(B*DEL)
                        if not A_skip and not B_skip:
                            d2ZdAB_p = (gBp[1]-g[1])/(B*DEL)
                            d2ZdAB_m = (g[1]-gBm[1])/(B*DEL)
                        # Partial derivatives w.r.t. eos_type
                        assert h[0] == 0
                        assert h[1] == 0
                        assert h[3] == 0
                        if not A_skip:
                            assert (pytest.approx(d2ZdA2_p, FD_TOL) == h[2] or
                                    pytest.approx(d2ZdA2_m, FD_TOL) == h[2] or
                                    between(h[2], d2ZdA2_p, d2ZdA2_m))
                        if not A_skip and not B_skip:
                            assert (pytest.approx(d2ZdAB_p, FD_TOL) == h[4] or
                                    pytest.approx(d2ZdAB_m, FD_TOL) == h[4] or
                                    between(h[4], d2ZdAB_p, d2ZdAB_m))
                        if not B_skip:
                            # Second derivative w.r.t. B is very sensitive near
                            # point that roots disappear, and is at a
                            # maximum (or minimum) so skip tests if close to
                            # this point
                            assert (pytest.approx(d2ZdB2_p, FD_TOL) == h[5] or
                                    pytest.approx(d2ZdB2_m, FD_TOL) == h[5] or
                                    between(h[5], d2ZdB2_p, d2ZdB2_m))
                except AssertionError:
                    # Print values at failure and raise exception
                    print(eos_type, T, P, A, B, Z)
                    raise
@pytest.mark.integration
@pytest.mark.skipif(not prop_available,
                    reason="Cubic root finder not available")
def test_roots_Z_vap(root_finder):
    """Validate the vapor-root external function over a Tr/Pr sweep.

    Mirrors test_roots_Z_liq but for the vapor branch: the only behavioral
    difference is the second-derivative sign check (non-negative here)
    when the discriminant indicates multiple real roots.
    """
    for eos_type in [0, 1]:
        u = EoS_param[eos_type]["u"]
        w = EoS_param[eos_type]["w"]
        for T in t_set:
            for P in p_set:
                # Calculate A and B parameters from Tr and Pr
                A = 0.5*P/T**2
                B = 0.1*P/T
                # Get results of external function call
                f = root_finder.proc_Z_vap
                assert(isinstance(f, AMPLExternalFunction))
                Z, g, h = f.evaluate_fgh(args=(eos_type, A, B))
                # Calculate parameters of cubic
                c1 = 1
                c2 = -(1+B-u*B)
                c3 = (A-u*B-(u-w)*B**2)
                c4 = -(A*B+w*B**2+w*B**3)
                # Calculate residual and derivatives w.r.t. Z
                res = c1*Z**3 + c2*Z**2 + c3*Z + c4
                dz = 3*c1*Z**2 + 2*c2*Z + c3
                dz2 = 6*c1*Z + 2*c2
                try:
                    # Residual can be extremely sensitive to value of Z, so
                    # if residual fails tolerance, test size of Newton step to
                    # converge to root
                    assert (pytest.approx(0, abs=TOL) == res or
                            pytest.approx(0, abs=TOL) == value(res/dz))
                    # Check derivative signs to confirm correct root
                    assert dz >= 0  # Should always have non-negative slope
                    # Determine number of roots - calculate discriminant
                    dis = (18*c1*c2*c3*c4 - 4*c2**3*c4 + c2**2*c3**2 -
                           4*c1*c3**3 - 27*c1**2*c4**2)
                    if dis >= 0:
                        # Cubic has 2 or 3 real roots
                        # Second derivative should be non-negative
                        assert dz2 >= 0
                    # otherwise no need to check 2nd derivative
                    if TEST_DERS:
                        # Perform finite differences on A and B
                        ZAp, gAp, hAp = f.evaluate_fgh(
                                args=(eos_type, A*(1+DEL), B))
                        ZAm, gAm, hAm = f.evaluate_fgh(
                                args=(eos_type, A*(1-DEL), B))
                        ZBp, gBp, hBp = f.evaluate_fgh(
                                args=(eos_type, A, B*(1+DEL)))
                        ZBm, gBm, hBm = f.evaluate_fgh(
                                args=(eos_type, A, B*(1-DEL)))
                        # Check variance in Z values. A very large difference
                        # indicates a transition between single and multiple
                        # root regions, and hence that the partial derivatives
                        # will be very sensitive.
                        # In these cases, skip the derivative tests.
                        if abs(ZAp - Z) > 1e-3 or abs(ZAm - Z) > 1e-3:
                            A_skip = True
                        else:
                            A_skip = False
                        if (abs(ZBp - Z) > 1e-3 or
                                abs(ZBm - Z) > 1e-3 or
                                abs(dis) < 1e-7):
                            B_skip = True
                        else:
                            B_skip = False
                        # Test gradient terms
                        # Calculate numerical first partial derivative
                        if not A_skip:
                            dZdA_p = (ZAp-Z)/(A*DEL)
                            dZdA_m = (Z-ZAm)/(A*DEL)
                        if not B_skip:
                            dZdB_p = (ZBp-Z)/(B*DEL)
                            dZdB_m = (Z-ZBm)/(B*DEL)
                        # Partial derivative w.r.t. EoS identifier
                        assert g[0] == 0
                        # Check that external function value lies within TOL of
                        # least one of the numerical values (delta+ or delta-),
                        # OR lies between the two numerical values and within
                        # 10*TOL of one of the numerical values
                        if not A_skip:
                            assert (
                                pytest.approx(dZdA_p, FD_TOL) == g[1] or
                                pytest.approx(dZdA_m, FD_TOL) == g[1] or
                                (between(g[1], dZdA_p, dZdA_m) and (
                                    pytest.approx(dZdA_p, 10*FD_TOL) == g[1] or
                                    pytest.approx(dZdA_m, 10*FD_TOL) == g[1])))
                        if not B_skip:
                            assert (
                                pytest.approx(dZdB_p, FD_TOL) == g[2] or
                                pytest.approx(dZdB_m, FD_TOL) == g[2] or
                                (between(g[2], dZdB_p, dZdB_m) and (
                                    pytest.approx(dZdB_p, 10*FD_TOL) == g[2] or
                                    pytest.approx(dZdB_m, 10*FD_TOL) == g[2])))
                        # Test hessian terms
                        # Calculate numerical second partial derivatives
                        if not A_skip:
                            d2ZdA2_p = (gAp[1]-g[1])/(A*DEL)
                            d2ZdA2_m = (g[1]-gAm[1])/(A*DEL)
                        if not B_skip:
                            d2ZdB2_p = (gBp[2]-g[2])/(B*DEL)
                            d2ZdB2_m = (g[2]-gBm[2])/(B*DEL)
                        if not A_skip and not B_skip:
                            d2ZdAB_p = (gBp[1]-g[1])/(B*DEL)
                            d2ZdAB_m = (g[1]-gBm[1])/(B*DEL)
                        # Partial derivatives w.r.t. eos_type
                        assert h[0] == 0
                        assert h[1] == 0
                        assert h[3] == 0
                        if not A_skip:
                            assert (pytest.approx(d2ZdA2_p, FD_TOL) == h[2] or
                                    pytest.approx(d2ZdA2_m, FD_TOL) == h[2] or
                                    between(h[2], d2ZdA2_p, d2ZdA2_m))
                        if not A_skip and not B_skip:
                            assert (pytest.approx(d2ZdAB_p, FD_TOL) == h[4] or
                                    pytest.approx(d2ZdAB_m, FD_TOL) == h[4] or
                                    between(h[4], d2ZdAB_p, d2ZdAB_m))
                        if not B_skip:
                            # Second derivative w.r.t. B is very sensitive near
                            # point that roots disappear, and is at a
                            # maximum (or minimum) so skip tests if close to
                            # this point
                            assert (pytest.approx(d2ZdB2_p, FD_TOL) == h[5] or
                                    pytest.approx(d2ZdB2_m, FD_TOL) == h[5] or
                                    between(h[5], d2ZdB2_p, d2ZdB2_m))
                except AssertionError:
                    # Print values at failure and raise exception
                    print(eos_type, T, P, A, B, Z)
                    raise
@pytest.mark.integration
@pytest.mark.skipif(not prop_available,
                    reason="Cubic root finder not available")
def test_roots_Z_liq_ext(root_finder):
    """Validate the extended liquid-root external function.

    Where a liquid root exists the function must match the plain cubic
    root (same checks as test_roots_Z_liq, without derivative tests);
    where it does not, the returned Z must satisfy the extended cubic
    built from the inflection shift ``a``, and the gradient/hessian are
    checked against finite differences in A and B.
    """
    for eos_type in [0, 1]:
        u = EoS_param[eos_type]["u"]
        w = EoS_param[eos_type]["w"]
        for T in t_set:
            for P in p_set:
                # Calculate A and B parameters from Tr and Pr
                A = 0.5*P/T**2
                B = 0.1*P/T
                # Get results of external function call
                f = root_finder.proc_Z_liq_x
                assert(isinstance(f, AMPLExternalFunction))
                Z, g, h = f.evaluate_fgh(args=(eos_type, A, B))
                # Calculate parameters of cubic
                c1 = 1
                c2 = -(1+B-u*B)
                c3 = (A-u*B-(u-w)*B**2)
                c4 = -(A*B+w*B**2+w*B**3)
                det = c2**2 - 3*c3
                a = -(1.0/3.0)*(c2 + det**0.5)
                # Check to see if extension is triggered
                if det <= 0 or (a**3 + c2*a**2 + c3*a + c4) >= 0:
                    # Extension is not used
                    # Calculate residual and derivatives w.r.t. Z
                    res = c1*Z**3 + c2*Z**2 + c3*Z + c4
                    dz = 3*c1*Z**2 + 2*c2*Z + c3
                    dz2 = 6*c1*Z + 2*c2
                    try:
                        # Residual can be extremely sensitive to value of Z, so
                        # if residual fails tolerance, test size of Newton step
                        # to converge to root
                        assert (pytest.approx(0, abs=TOL) == res or
                                pytest.approx(0, abs=TOL) == value(res/dz))
                        # Check derivative signs to confirm correct root
                        assert dz >= 0  # Should always have non-negative slope
                        # Determine number of roots - calculate discriminant
                        dis = (18*c1*c2*c3*c4 - 4*c2**3*c4 + c2**2*c3**2 -
                               4*c1*c3**3 - 27*c1**2*c4**2)
                        if dis >= 0:
                            # Cubic has 2 or 3 real roots
                            # Second derivative should be non-positive
                            assert dz2 <= 0
                        # otherwise no need to check 2nd derivative
                    except AssertionError:
                        # Print values at failure and raise exception
                        print(eos_type, T, P, A, B, Z, det, a)
                        raise
                else:
                    # Extension is used, calculate extended root
                    c1x = 2*a
                    c2x = -c2 - 3.0*c1x
                    c3x = 3*c1x**2 + 2*c2*c1x + c3
                    c4x = c4 - 0.75*c1x**3 - 0.5*c2*c1x**2
                    # Calculate residual and derivatives w.r.t. Z_ext
                    res = c1*Z**3 + c2x*Z**2 + c3x*Z + c4x
                    dz = 3*c1*Z**2 + 2*c2x*Z + c3x
                    try:
                        # Residual can be extremely sensitive to value of Z, so
                        # if residual fails tolerance, test size of Newton step
                        # to converge to root
                        assert (pytest.approx(0, abs=TOL) == res or
                                pytest.approx(0, abs=TOL) == value(res/dz))
                        # Check derivative signs to confirm correct root
                        assert dz >= 0  # Should always have non-negative slope
                        # Determine number of roots - calculate discriminant
                        dis = (18*c1*c2x*c3x*c4x - 4*c2x**3*c4x +
                               c2x**2*c3x**2 - 4*c1*c3x**3 - 27*c1**2*c4x**2)
                        # Second derivative could be anything, don't check
                    except AssertionError:
                        # Print values at failure and raise exception
                        print(eos_type, T, P, A, B, Z)
                        raise
                    if TEST_DERS:
                        try:
                            # Perform finite differences on A and B
                            ZAp, gAp, hAp = f.evaluate_fgh(
                                    args=(eos_type, A*(1+DEL), B))
                            ZAm, gAm, hAm = f.evaluate_fgh(
                                    args=(eos_type, A*(1-DEL), B))
                            ZBp, gBp, hBp = f.evaluate_fgh(
                                    args=(eos_type, A, B*(1+DEL)))
                            ZBm, gBm, hBm = f.evaluate_fgh(
                                    args=(eos_type, A, B*(1-DEL)))
                            # Check variance in Z values. A very large
                            # difference indicates a transition between
                            # single and multiple root regions, and hence that
                            # the partial derivatives will be very sensitive.
                            # In these cases, skip the derivative tests.
                            if (abs(ZAp - Z) > 1e-3 or
                                    abs(ZAm - Z) > 1e-3 or
                                    abs(a) < 1e-1):
                                A_skip = True
                            else:
                                A_skip = False
                            if (abs(ZBp - Z) > 1e-3 or
                                    abs(ZBm - Z) > 1e-3 or
                                    abs(dis) < 1e-7 or
                                    abs(a) < 1e-1):
                                B_skip = True
                            else:
                                B_skip = False
                            # Test gradient terms
                            # Calculate numerical first partial derivative
                            if not A_skip:
                                dZdA_p = (ZAp-Z)/(A*DEL)
                                dZdA_m = (Z-ZAm)/(A*DEL)
                            if not B_skip:
                                dZdB_p = (ZBp-Z)/(B*DEL)
                                dZdB_m = (Z-ZBm)/(B*DEL)
                            # Partial derivative w.r.t. EoS identifier
                            assert g[0] == 0
                            # Check that external function value lies within
                            # TOL of least one of the numerical values (delta+
                            # or delta-), OR lies between the two numerical
                            # values and within 10*TOL of one of the numerical
                            # values
                            if not A_skip:
                                assert (
                                    pytest.approx(dZdA_p, FD_TOL) == g[1] or
                                    pytest.approx(dZdA_m, FD_TOL) == g[1] or
                                    (between(g[1], dZdA_p, dZdA_m) and (
                                        pytest.approx(dZdA_p, 10*FD_TOL) ==
                                        g[1] or
                                        pytest.approx(dZdA_m, 10*FD_TOL) ==
                                        g[1])))
                            if not B_skip:
                                assert (
                                    pytest.approx(dZdB_p, FD_TOL) == g[2] or
                                    pytest.approx(dZdB_m, FD_TOL) == g[2] or
                                    (between(g[2], dZdB_p, dZdB_m) and (
                                        pytest.approx(dZdB_p, 10*FD_TOL) ==
                                        g[2] or
                                        pytest.approx(dZdB_m, 10*FD_TOL) ==
                                        g[2])))
                            # Test hessian terms
                            # Calculate numerical second partial derivatives
                            if not A_skip:
                                d2ZdA2_p = (gAp[1]-g[1])/(A*DEL)
                                d2ZdA2_m = (g[1]-gAm[1])/(A*DEL)
                            if not B_skip:
                                d2ZdB2_p = (gBp[2]-g[2])/(B*DEL)
                                d2ZdB2_m = (g[2]-gBm[2])/(B*DEL)
                            if not A_skip and not B_skip:
                                d2ZdAB_p = (gBp[1]-g[1])/(B*DEL)
                                d2ZdAB_m = (g[1]-gBm[1])/(B*DEL)
                            # Partial derivatives w.r.t. eos_type
                            assert h[0] == 0
                            assert h[1] == 0
                            assert h[3] == 0
                            if not A_skip:
                                assert (
                                    pytest.approx(d2ZdA2_p, FD_TOL) == h[2] or
                                    pytest.approx(d2ZdA2_m, FD_TOL) == h[2] or
                                    between(h[2], d2ZdA2_p, d2ZdA2_m))
                            if not A_skip and not B_skip:
                                assert (
                                    pytest.approx(d2ZdAB_p, FD_TOL) == h[4] or
                                    pytest.approx(d2ZdAB_m, FD_TOL) == h[4] or
                                    between(h[4], d2ZdAB_p, d2ZdAB_m))
                            if not B_skip:
                                # Second derivative w.r.t. B is very sensitive
                                # near point that roots disappear, and is at a
                                # maximum (or minimum) so skip tests if close
                                # to this point
                                assert (
                                    pytest.approx(d2ZdB2_p, FD_TOL) == h[5] or
                                    pytest.approx(d2ZdB2_m, FD_TOL) == h[5] or
                                    between(h[5], d2ZdB2_p, d2ZdB2_m))
                        except AssertionError:
                            # Print values at failure and raise exception
                            print(eos_type, T, P, A, B, Z)
                            raise
@pytest.mark.integration
@pytest.mark.skipif(not prop_available,
                    reason="Cubic root finder not available")
def test_roots_Z_vap_ext(root_finder):
    """Validate the extended vapor-root external function ``proc_Z_vap_x``.

    For each EoS type (0 and 1) and each point on the T/P grid:

    * compute the cubic coefficients from the A and B parameters,
    * decide (from the determinant/inflection test) whether the smooth
      extension is active, and check that the returned Z is a root of
      either the original cubic or the extended cubic (accepting a small
      Newton step when the raw residual is too sensitive),
    * if TEST_DERS is set, verify the gradient ``g`` and Hessian ``h``
      returned by ``evaluate_fgh`` against forward/backward finite
      differences in A and B, skipping points near root transitions
      where the derivatives are ill-conditioned.

    On any assertion failure the offending (eos_type, T, P, A, B, Z)
    point is printed before re-raising, to aid debugging.
    """
    for eos_type in [0, 1]:
        u = EoS_param[eos_type]["u"]
        w = EoS_param[eos_type]["w"]
        for T in t_set:
            for P in p_set:
                # Calculate A and B parameters from Tr and Pr
                A = 0.5*P/T**2
                B = 0.1*P/T
                # Get results of external function call
                f = root_finder.proc_Z_vap_x
                assert(isinstance(f, AMPLExternalFunction))
                Z, g, h = f.evaluate_fgh(args=(eos_type, A, B))
                # Calculate parameters of cubic
                c1 = 1
                c2 = -(1+B-u*B)
                c3 = (A-u*B-(u-w)*B**2)
                c4 = -(A*B+w*B**2+w*B**3)
                det = c2**2 - 3*c3
                a = -(1.0/3.0)*(c2 - det**0.5)
                # Check to see if extension is triggered
                if det <= 0 or (a**3 + c2*a**2 + c3*a + c4) <= 0:
                    # Extension is not used
                    # Calculate residual and derivatives w.r.t. Z
                    res = c1*Z**3 + c2*Z**2 + c3*Z + c4
                    dz = 3*c1*Z**2 + 2*c2*Z + c3
                    dz2 = 6*c1*Z + 2*c2
                    try:
                        # Residual can be extremely sensitive to value of Z, so
                        # if residual fails tolerance, test size of Newton step
                        # to converge to root
                        assert (pytest.approx(0, abs=TOL) == res or
                                pytest.approx(0, abs=TOL) == value(res/dz))
                        # Check derivative signs to confirm correct root
                        assert dz >= 0  # Should always have non-negative slope
                        # Determine number of roots - calculate discriminant
                        dis = (18*c1*c2*c3*c4 - 4*c2**3*c4 + c2**2*c3**2 -
                               4*c1*c3**3 - 27*c1**2*c4**2)
                        if dis >= 0:
                            # Cubic has 2 or 3 real roots
                            # Second derivative should be non-negative
                            assert dz2 >= 0
                        # otherwise no need to check 2nd derivative
                    except AssertionError:
                        # Print values at failure and raise exception
                        print(eos_type, T, P, A, B, Z)
                        raise
                else:
                    # Extension is used, calculate extended root
                    c1x = 2*a
                    c2x = -c2 - 3.0*c1x
                    c3x = 3*c1x**2 + 2*c2*c1x + c3
                    c4x = c4 - 0.75*c1x**3 - 0.5*c2*c1x**2
                    # Calculate residual and derivatives w.r.t. Z_ext
                    res = c1*Z**3 + c2x*Z**2 + c3x*Z + c4x
                    dz = 3*c1*Z**2 + 2*c2x*Z + c3x
                    try:
                        # Residual can be extremely sensitive to value of Z, so
                        # if residual fails tolerance, test size of Newton step
                        # to converge to root
                        assert (pytest.approx(0, abs=TOL) == res or
                                pytest.approx(0, abs=TOL) == value(res/dz))
                        # Check derivative signs to confirm correct root
                        assert dz >= 0  # Should always have non-negative slope
                        # Determine number of roots - calculate discriminant
                        # (used below to decide whether to skip B-derivative
                        # checks near root-disappearance points)
                        dis = (18*c1*c2x*c3x*c4x - 4*c2x**3*c4x +
                               c2x**2*c3x**2 - 4*c1*c3x**3 - 27*c1**2*c4x**2)
                        # Second derivative could be anything, don't check
                    except AssertionError:
                        # Print values at failure and raise exception
                        print(eos_type, T, P, A, B, Z)
                        raise
                if TEST_DERS:
                    try:
                        # Perform finite differences on A and B
                        ZAp, gAp, hAp = f.evaluate_fgh(
                            args=(eos_type, A*(1+DEL), B))
                        ZAm, gAm, hAm = f.evaluate_fgh(
                            args=(eos_type, A*(1-DEL), B))
                        ZBp, gBp, hBp = f.evaluate_fgh(
                            args=(eos_type, A, B*(1+DEL)))
                        ZBm, gBm, hBm = f.evaluate_fgh(
                            args=(eos_type, A, B*(1-DEL)))
                        # Check variance in Z values. A very large
                        # difference indicates a transition between
                        # single and multiple root regions, and hence that
                        # the partial derivatives will be very sensitive.
                        # In these cases, skip the derivative tests.
                        if (abs(ZAp - Z) > 1e-3 or
                            abs(ZAm - Z) > 1e-3 or
                            abs(a) < 0.5):
                            A_skip = True
                        else:
                            A_skip = False
                        if (abs(ZBp - Z) > 1e-3 or
                            abs(ZBm - Z) > 1e-3 or
                            abs(dis) < 1e-7 or
                            abs(a) < 0.5):
                            B_skip = True
                        else:
                            B_skip = False
                        # Test gradient terms
                        # Calculate numerical first partial derivative
                        if not A_skip:
                            dZdA_p = (ZAp-Z)/(A*DEL)
                            dZdA_m = (Z-ZAm)/(A*DEL)
                        if not B_skip:
                            dZdB_p = (ZBp-Z)/(B*DEL)
                            dZdB_m = (Z-ZBm)/(B*DEL)
                        # Partial derivative w.r.t. EoS identifier
                        assert g[0] == 0
                        # Check that external function value lies within
                        # TOL of least one of the numerical values (delta+
                        # or delta-), OR lies between the two numerical
                        # values and within 10*TOL of one of the numerical
                        # values
                        if not A_skip:
                            assert (
                                pytest.approx(dZdA_p, FD_TOL) == g[1] or
                                pytest.approx(dZdA_m, FD_TOL) == g[1] or
                                (between(g[1], dZdA_p, dZdA_m) and (
                                    pytest.approx(dZdA_p, 10*FD_TOL) ==
                                    g[1] or
                                    pytest.approx(dZdA_m, 10*FD_TOL) ==
                                    g[1])))
                        if not B_skip:
                            assert (
                                pytest.approx(dZdB_p, FD_TOL) == g[2] or
                                pytest.approx(dZdB_m, FD_TOL) == g[2] or
                                (between(g[2], dZdB_p, dZdB_m) and (
                                    pytest.approx(dZdB_p, 10*FD_TOL) ==
                                    g[2] or
                                    pytest.approx(dZdB_m, 10*FD_TOL) ==
                                    g[2])))
                        # Test hessian terms
                        # Calculate numerical second partial derivatives
                        if not A_skip:
                            d2ZdA2_p = (gAp[1]-g[1])/(A*DEL)
                            d2ZdA2_m = (g[1]-gAm[1])/(A*DEL)
                        if not B_skip:
                            d2ZdB2_p = (gBp[2]-g[2])/(B*DEL)
                            d2ZdB2_m = (g[2]-gBm[2])/(B*DEL)
                        if not A_skip and not B_skip:
                            d2ZdAB_p = (gBp[1]-g[1])/(B*DEL)
                            d2ZdAB_m = (g[1]-gBm[1])/(B*DEL)
                        # Partial derivatives w.r.t eos_type
                        assert h[0] == 0
                        assert h[1] == 0
                        assert h[3] == 0
                        if not A_skip:
                            assert (
                                pytest.approx(d2ZdA2_p, FD_TOL) == h[2] or
                                pytest.approx(d2ZdA2_m, FD_TOL) == h[2] or
                                between(h[2], d2ZdA2_p, d2ZdA2_m))
                        if not A_skip and not B_skip:
                            assert (
                                pytest.approx(d2ZdAB_p, FD_TOL) == h[4] or
                                pytest.approx(d2ZdAB_m, FD_TOL) == h[4] or
                                between(h[4], d2ZdAB_p, d2ZdAB_m))
                        if not B_skip:
                            # Second derivative w.r.t. B is very sensitive
                            # near point that roots disappear, and is at a
                            # maximum (or minimum) so skip tests if close
                            # to this point
                            assert (
                                pytest.approx(d2ZdB2_p, FD_TOL) == h[5] or
                                pytest.approx(d2ZdB2_m, FD_TOL) == h[5] or
                                between(h[5], d2ZdB2_p, d2ZdB2_m))
                    except AssertionError:
                        # Print values at failure and raise exception
                        print(eos_type, T, P, A, B, Z)
                        raise
| 45.114144
| 81
| 0.407541
| 4,154
| 36,362
| 3.466538
| 0.079682
| 0.056667
| 0.033056
| 0.016667
| 0.897778
| 0.897083
| 0.897083
| 0.881528
| 0.875417
| 0.875417
| 0
| 0.046825
| 0.501953
| 36,362
| 805
| 82
| 45.170186
| 0.748316
| 0.218442
| 0
| 0.897331
| 0
| 0
| 0.007626
| 0
| 0
| 0
| 0
| 0.001242
| 0.131417
| 1
| 0.01232
| false
| 0
| 0.01232
| 0.002053
| 0.028747
| 0.016427
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e1efdc5cda3dab5e72e4e4011c30c476af2808b0
| 328
|
py
|
Python
|
pytorch_grad_cam/__init__.py
|
LucaButera/pytorch-grad-cam
|
582913a34264a45b581d23d13d0b42351ffef3a4
|
[
"MIT"
] | 1
|
2021-04-26T07:57:39.000Z
|
2021-04-26T07:57:39.000Z
|
pytorch_grad_cam/__init__.py
|
Spicybird/pytorch-grad-cam
|
977556ee2ceda7487b3fe8c27e62ec26040b960b
|
[
"MIT"
] | null | null | null |
pytorch_grad_cam/__init__.py
|
Spicybird/pytorch-grad-cam
|
977556ee2ceda7487b3fe8c27e62ec26040b960b
|
[
"MIT"
] | null | null | null |
from pytorch_grad_cam.grad_cam import GradCAM
from pytorch_grad_cam.ablation_cam import AblationCAM
from pytorch_grad_cam.xgrad_cam import XGradCAM
from pytorch_grad_cam.grad_cam_plusplus import GradCAMPlusPlus
from pytorch_grad_cam.score_cam import ScoreCAM
from pytorch_grad_cam.guided_backprop import GuidedBackpropReLUModel
| 46.857143
| 68
| 0.908537
| 49
| 328
| 5.693878
| 0.346939
| 0.200717
| 0.322581
| 0.387097
| 0.179211
| 0.179211
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073171
| 328
| 6
| 69
| 54.666667
| 0.917763
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
fbe757a793ea122f782b86a45ee7629ab061dce0
| 343
|
py
|
Python
|
tests/internal/instance_type/test_instance_type_i_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
tests/internal/instance_type/test_instance_type_i_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
tests/internal/instance_type/test_instance_type_i_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | 1
|
2021-12-15T11:58:22.000Z
|
2021-12-15T11:58:22.000Z
|
# Testing module instance_type.i
import pytest
import ec2_compare.internal.instance_type.i
def test_get_internal_data_instance_type_i_get_instances_list():
    """The 'i' instance-type module must report at least one instance."""
    instances = ec2_compare.internal.instance_type.i.get_instances_list()
    assert len(instances) > 0
def test_get_internal_data_instance_type_i_get():
    """The 'i' instance-type module's `get` container must be non-empty."""
    data = ec2_compare.internal.instance_type.i.get
    assert len(data) > 0
| 34.3
| 75
| 0.845481
| 56
| 343
| 4.732143
| 0.339286
| 0.271698
| 0.29434
| 0.241509
| 0.826415
| 0.826415
| 0.611321
| 0.611321
| 0.611321
| 0
| 0
| 0.015773
| 0.075802
| 343
| 9
| 76
| 38.111111
| 0.820189
| 0.087464
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
220897503641e9373fd0393e3d3c99b2c6a7387f
| 10,376
|
py
|
Python
|
aventura.py
|
ItzAlexArtz/python-text-adventure
|
265782b6a4ff4a495bd32eac475743e21ba7e24a
|
[
"MIT"
] | null | null | null |
aventura.py
|
ItzAlexArtz/python-text-adventure
|
265782b6a4ff4a495bd32eac475743e21ba7e24a
|
[
"MIT"
] | null | null | null |
aventura.py
|
ItzAlexArtz/python-text-adventure
|
265782b6a4ff4a495bd32eac475743e21ba7e24a
|
[
"MIT"
] | null | null | null |
import time
import random
#game function
def game():
    """Run one play-through of the cavern text adventure.

    Returns 1 when the player escapes/survives and 0 when the player dies.

    NOTE(review): several input branches fall through without setting a
    value — e.g. an unrecognized answer to the "approach the object"
    prompt leaves ``ch3`` undefined (NameError), and the skeleton-fight
    and "walk away" sub-branches return implicitly, so the caller can
    receive None. The game loop treats anything other than 1 as a death.
    Confirm whether this is intended before relying on the return value.
    """
    print ("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
    print ("Welcome to the cavern of secrets!")
    print ("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
    time.sleep(3)
    print ("You enter a dark cavern out of curiosity. It is dark and you can only make out a small stick on the floor.")
    ch1 = str(input("Do you take it? [y/n]: "))
    #STICK TAKEN
    if ch1 in ['y', 'Y', 'Yes', 'YES', 'yes']:
        print("You have taken the stick!")
        time.sleep(2)
        stick = 1
    #STICK NOT TAKEN
    else:
        print("You did not take the stick")
        stick = 0
    print ("As you proceed further into the cave, you see a small glowing object")
    ch2 = str(input("Do you approach the object? [y/n]"))
    #APPROACH SPIDER
    if ch2 in ['y', 'Y', 'Yes', 'YES', 'yes']:
        print ("You approach the object...")
        time.sleep(2)
        print ("As you draw closer, you begin to make out the object as an eye!")
        time.sleep(1)
        print ("The eye belongs to a giant spider!")
        ch3 = str(input("Do you try to fight it? [Y/N]"))
    #APPROACH SPIDER
    elif ch2 in ['n', 'N', 'No', 'NO', 'no']:
        print ("You don't approach the object...")
        time.sleep(2)
        print ("As you walk away, the object begins to come closer to you!")
        time.sleep(1)
        print ("The object is an eye that belongs to a giant spider!")
        ch3 = str(input("Do you try to fight it? [Y/N]"))
    # FIGHT SPIDER
    if ch3 in ['y', 'Y', 'Yes', 'YES', 'yes']:
        # WITH STICK
        if stick == 1:
            print("You only have a stick to fight with!")
            print("You quickly jab the spider in it's eye and gain an advantage")
            time.sleep(2)
            print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
            print("                  Fighting...                   ")
            print("   YOU MUST HIT ABOVE A 5 TO KILL THE SPIDER    ")
            print("IF THE SPIDER HITS HIGHER THAN YOU, YOU WILL DIE")
            print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
            time.sleep(2)
            fdmg1 = int(random.randint(3, 10))
            edmg1 = int(random.randint(1, 5))
            print("you hit a", fdmg1)
            print("the spider hits a", edmg1)
            time.sleep(2)
            if edmg1 > fdmg1:
                print ("The spider has dealt more damage than you!")
                complete = 0
                return complete
            # NOTE(review): the banner says "above a 5" but this checks
            # fdmg1 < 5, so a hit of exactly 5 counts as a kill — confirm.
            elif fdmg1 < 5:
                print ("You didn't do enough damage to kill the spider, but you manage to escape")
                complete = 1
                return complete
            else:
                print ("You killed the spider!")
                print ("As you want to walk away you heard a girl screaming!")
                explore = input ('Do you want to find out who screamed? [y/n] ')
                if explore in ['y', 'Y', 'yes', 'YES', 'Yes', ]:
                    print ("As you where going further into the cave, you see a princess!")
                    fight = input("Do you want to save her? [y/n]")
                    if fight in ['y', 'Y', 'yes', 'YES', 'Yes', ]:
                        print ("As you walk closer to her a skeleton with a sword and a shield reveals himself from the darkness of the cave!")
                        fight = str(input("Do you try to fight it? [Y/N]"))
                        if fight in ['y', 'Y', 'yes', 'YES', 'Yes', ]:
                            print ("You choose to fight it!")
                            time.sleep(2)
                            print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
                            print("                  Fighting...                    ")
                            print("   YOU MUST HIT ABOVE A 20 TO KILL THE Skeleton  ")
                            print("IF THE Skeleton HITS HIGHER THAN YOU, YOU WILL DIE")
                            print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
                            time.sleep(2)
                            # NOTE(review): fdmg1/edmg1 are rolled but never
                            # compared — the skeleton fight is always won.
                            fdmg1 = int(random.randint(20, 30))
                            edmg1 = int(random.randint(10, 15))
                            print("you hit a", fdmg1)
                            print("the skeleton hits a", edmg1)
                            time.sleep(2)
                            print("You saved the princess and she thanks you for saving her!")
                            print("Getting out of the cave .......")
                            print("Getting the princess to her kingdom......")
                            time.sleep(2)
                            print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
                            print("           You Won the Game! Congrats!           ")
                            print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
                            complete = 1
                            return complete
                        else:
                            if fight in ['n', 'N', 'no', ]:
                                print("You choose not to fight the Skeleton")
                                time.sleep(1)
                                print("As yo turn away it ambushes you with its sword and kills you!!!")
                elif explore in ['n', 'N', 'no', 'NO', 'No', ]:
                    print("When you wanted to get out of the cave and go home a giant spider jumped in front of you from the darkness and killed you!")
        # WITHOUT STICK
        else:
            print("You don't have anything to fight with!")
            time.sleep(2)
            print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
            print("                  Fighting...                   ")
            print("   YOU MUST HIT ABOVE A 10 TO KILL THE SPIDER   ")
            print("IF THE SPIDER HITS HIGHER THAN YOU, YOU WILL DIE")
            print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
            time.sleep(2)
            fdmg1 = int(random.randint(10, 12))
            edmg1 = int(random.randint(1, 5))
            print("you hit a", fdmg1)
            print("the spider hits a", edmg1)
            time.sleep(2)
            if edmg1 > fdmg1:
                print ("The spider has dealt more damage than you!")
                complete = 0
                return complete
            # NOTE(review): fdmg1 is drawn from randint(10, 12), so this
            # branch is unreachable dead code — confirm intent.
            elif fdmg1 < 5:
                print ("You didn't do enough damage to kill the spider, but you manage to escape")
                complete = 1
                return complete
            else:
                print ("You killed the spider!")
                print ("As you want to walk away you heard a girl screaming!")
                explore = input ('Do you want to find out who screamed? [y/n]')
                # NOTE(review): unlike the with-stick branch, this asks to
                # save the princess before revealing her — confirm intent.
                fight = input("Do you want to save her? [y/n]")
                if explore in ['y', 'Y', 'yes', 'YES', 'Yes']:
                    print ("As you where going further into the cave ,you saw a princess! Do you want to save her? [y/n]")
                    if fight in ['y', 'Y', 'yes', 'YES', 'Yes', ]:
                        print ("As you walk closer to her a skeleton with a sword and a shield reveals himself from the darkness of the cave!")
                        fight = str(input("Do you try to fight it? [Y/N]"))
                        if fight in ['y', 'Y', 'yes', 'YES', 'Yes', ]:
                            print ("You choose to fight it!")
                            time.sleep(2)
                            print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
                            print("                  Fighting...                    ")
                            print("   YOU MUST HIT ABOVE A 20 TO KILL THE Skeleton  ")
                            print("IF THE Skeleton HITS HIGHER THAN YOU, YOU WILL DIE")
                            print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
                            time.sleep(2)
                            # NOTE(review): damage rolls are again unused;
                            # the skeleton fight is always won.
                            fdmg1 = int(random.randint(1, 20))
                            edmg1 = int(random.randint(1, 15))
                            print("you hit a", fdmg1)
                            print("the skeleton hits a", edmg1)
                            time.sleep(2)
                            print("You saved the princess and she thanks you for saving her!")
                            print("Getting out of the cave .......")
                            print("Getting the princess to her kingdom......")
                            time.sleep(2)
                            print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
                            print("           You Won the Game! Congrats!           ")
                            print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
                            complete = 1
                            return complete
                        else:
                            if fight in ['n', 'N', 'no' , ]:
                                print("You choose not to fight the Skeleton")
                                time.sleep(1)
                                print("As yo turn away it ambushes you with its sword and kills you!!!")
                elif explore in ['n', 'N', 'no', 'NO', 'No', ]:
                    print("When you wanted to get out of the cave and go home a giant spider jumped in front of you from the darkness and killed you!")
    #DON'T FIGHT SPIDER
    elif ch3 in ['n', 'N', 'No', 'NO', 'no']:
        print ("You choose not to fight the spider.")
        time.sleep(1)
        print ("As you turn away, it ambushes you and impales you with it's fangs!!!")
        complete = 0
        return complete
# game loop: keep replaying until the player declines.
# The original duplicated the whole replay branch and used a bare `alive`
# expression (a no-op) to continue the loop; this version folds the two
# branches into one prompt selection and breaks explicitly on a non-yes
# answer. `complete == 1` means the player escaped; anything else
# (including None from an unhandled path in game()) counts as a death,
# matching the original behavior.
_PLAY_AGAIN_YES = ['y', 'Y', 'YES', 'yes', 'Yes']
alive = True
while alive:
    complete = game()
    if complete == 1:
        prompt = 'You managed to escape the cavern alive! Would you like to play again? [y/n]: '
    else:
        prompt = 'You have died! Would you like to play again? [y/n]: '
    alive = input(prompt)
    if alive not in _PLAY_AGAIN_YES:
        break
| 47.163636
| 174
| 0.414129
| 1,147
| 10,376
| 3.746295
| 0.152572
| 0.053991
| 0.039562
| 0.017919
| 0.780312
| 0.757971
| 0.754945
| 0.748196
| 0.725623
| 0.708401
| 0
| 0.016605
| 0.425405
| 10,376
| 219
| 175
| 47.378995
| 0.704126
| 0.013011
| 0
| 0.731844
| 0
| 0.03352
| 0.420585
| 0.063532
| 0
| 0
| 0
| 0
| 0
| 1
| 0.005587
| false
| 0
| 0.011173
| 0
| 0.055866
| 0.435754
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
224962418a269addec975f18a67c211e2fea9004
| 226
|
py
|
Python
|
np/reference/ch8code/arrayalmostequal.py
|
focusunsink/study_python
|
322326642db54df8725793d70a95d21ac40b6507
|
[
"MIT"
] | null | null | null |
np/reference/ch8code/arrayalmostequal.py
|
focusunsink/study_python
|
322326642db54df8725793d70a95d21ac40b6507
|
[
"MIT"
] | null | null | null |
np/reference/ch8code/arrayalmostequal.py
|
focusunsink/study_python
|
322326642db54df8725793d70a95d21ac40b6507
|
[
"MIT"
] | null | null | null |
"""Demonstrate the precision cutoff of assert_array_almost_equal.

The two arrays differ by 9e-9 in their second element, so the comparison
passes at decimal=8 but fails at decimal=9. Converted from Python 2
print statements (a syntax error on Python 3) to print() calls; the
runtime behavior is the same — assert_array_almost_equal returns None on
success (so "Decimal 8 None" is printed), and the decimal=9 call raises
AssertionError, terminating the script, exactly as the original demo did.
"""
import numpy as np

# Passes: |9e-9| < 1.5 * 10**-8, prints "Decimal 8 None"
print("Decimal 8", np.testing.assert_array_almost_equal(
    [0, 0.123456789], [0, 0.123456780], decimal=8))
# Raises AssertionError: |9e-9| >= 1.5 * 10**-9 (intentional demonstration)
print("Decimal 9", np.testing.assert_array_almost_equal(
    [0, 0.123456789], [0, 0.123456780], decimal=9))
| 45.2
| 102
| 0.747788
| 38
| 226
| 4.289474
| 0.421053
| 0.04908
| 0.184049
| 0.245399
| 0.736196
| 0.736196
| 0.736196
| 0.736196
| 0.736196
| 0.736196
| 0
| 0.234146
| 0.09292
| 226
| 4
| 103
| 56.5
| 0.560976
| 0
| 0
| 0
| 0
| 0
| 0.079646
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 0
| null | null | 0
| 0.333333
| null | null | 0.666667
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 13
|
3f0e304d267cd81efea3bdf14fd10e650273c5ec
| 24,698
|
py
|
Python
|
tests/unit_tests/test_tethys_apps/test_utilities.py
|
quyendong/tethys
|
99bcb524d5b2021b88d5fa15b7ed6b8acb460997
|
[
"BSD-2-Clause"
] | 1
|
2020-10-08T20:38:33.000Z
|
2020-10-08T20:38:33.000Z
|
tests/unit_tests/test_tethys_apps/test_utilities.py
|
quyendong/tethys
|
99bcb524d5b2021b88d5fa15b7ed6b8acb460997
|
[
"BSD-2-Clause"
] | 1
|
2018-04-14T19:40:54.000Z
|
2018-04-14T19:40:54.000Z
|
tests/unit_tests/test_tethys_apps/test_utilities.py
|
quyendong/tethys
|
99bcb524d5b2021b88d5fa15b7ed6b8acb460997
|
[
"BSD-2-Clause"
] | 1
|
2021-09-07T14:47:11.000Z
|
2021-09-07T14:47:11.000Z
|
import unittest
import mock
from tethys_apps import utilities
class TethysAppsUtilitiesTests(unittest.TestCase):
    def setUp(self):
        """No per-test fixtures are required for these tests."""
        pass
    def tearDown(self):
        """No per-test cleanup is required for these tests."""
        pass
    def test_get_directories_in_tethys_templates(self):
        """get_directories_in_tethys finds the templates dirs of both the test app and test extension."""
        # Get the templates directories for the test_app and test_extension
        result = utilities.get_directories_in_tethys(('templates',))
        self.assertGreaterEqual(len(result), 2)
        test_app = False
        test_ext = False
        for r in result:
            if '/tethysapp/test_app/templates' in r:
                test_app = True
            if '/tethysext-test_extension/tethysext/test_extension/templates' in r:
                test_ext = True
        self.assertTrue(test_app)
        self.assertTrue(test_ext)
    def test_get_directories_in_tethys_templates_with_app_name(self):
        """With with_app_name=True each result is a (name, path) pair including the app/extension name."""
        # Get the templates directories for the test_app and test_extension
        # Use the with_app_name argument, so that the app and extension names appear in the result
        result = utilities.get_directories_in_tethys(('templates',), with_app_name=True)
        self.assertGreaterEqual(len(result), 2)
        self.assertEqual(2, len(result[0]))
        self.assertEqual(2, len(result[1]))
        test_app = False
        test_ext = False
        for r in result:
            if 'test_app' in r and '/tethysapp/test_app/templates' in r[1]:
                test_app = True
            if 'test_extension' in r and '/tethysext-test_extension/tethysext/test_extension/templates' in r[1]:
                test_ext = True
        self.assertTrue(test_app)
        self.assertTrue(test_ext)
    @mock.patch('tethys_apps.utilities.SingletonHarvester')
    def test_get_directories_in_tethys_templates_extension_import_error(self, mock_harvester):
        """An unimportable extension module is skipped: app dirs are still found, extension dirs are not."""
        # Mock the extension_modules variable with bad data, to throw an ImportError
        mock_harvester().extension_modules = {'foo_invalid_foo': 'tethysext.foo_invalid_foo'}
        result = utilities.get_directories_in_tethys(('templates',))
        self.assertGreaterEqual(len(result), 1)
        test_app = False
        test_ext = False
        for r in result:
            if '/tethysapp/test_app/templates' in r:
                test_app = True
            if '/tethysext-test_extension/tethysext/test_extension/templates' in r:
                test_ext = True
        self.assertTrue(test_app)
        self.assertFalse(test_ext)
    def test_get_directories_in_tethys_foo(self):
        """A directory name that exists in no app or extension yields an empty result."""
        # Get the foo directories for the test_app and test_extension
        # foo doesn't exist
        result = utilities.get_directories_in_tethys(('foo',))
        self.assertEqual(0, len(result))
    def test_get_directories_in_tethys_foo_public(self):
        """Mixing a nonexistent name with an existing one ('public') still returns the existing dirs."""
        # Get the foo and public directories for the test_app and test_extension
        # foo doesn't exist, but public will
        result = utilities.get_directories_in_tethys(('foo', 'public'))
        self.assertGreaterEqual(len(result), 2)
        test_app = False
        test_ext = False
        for r in result:
            if '/tethysapp/test_app/public' in r:
                test_app = True
            if '/tethysext-test_extension/tethysext/test_extension/public' in r:
                test_ext = True
        self.assertTrue(test_app)
        self.assertTrue(test_ext)
    def test_get_active_app_none_none(self):
        """get_active_app returns None when neither a request nor a url is given."""
        # Get the active TethysApp object, with a request of None and url of None
        result = utilities.get_active_app(request=None, url=None)
        self.assertEqual(None, result)
        # Try again with the defaults, which are a request of None and url of None
        result = utilities.get_active_app()
        self.assertEqual(None, result)
    @mock.patch('tethys_apps.models.TethysApp')
    def test_get_active_app_request(self, mock_app):
        """A request with an /apps/<root_url>/ path resolves to the TethysApp looked up from the ORM."""
        # Mock up for TethysApp, and request
        mock_app.objects.get.return_value = mock.MagicMock()
        mock_request = mock.MagicMock()
        mock_request.path = '/apps/foo/bar'
        # Result should be mock for mock_app.objects.get.return_value
        result = utilities.get_active_app(request=mock_request)
        self.assertEqual(mock_app.objects.get(), result)
    @mock.patch('tethys_apps.models.TethysApp')
    def test_get_active_app_url(self, mock_app):
        """Passing the url directly (no request) also resolves to the ORM-looked-up TethysApp."""
        # Mock up for TethysApp
        mock_app.objects.get.return_value = mock.MagicMock()
        # Result should be mock for mock_app.objects.get.return_value
        result = utilities.get_active_app(url='/apps/foo/bar')
        self.assertEqual(mock_app.objects.get(), result)
    @mock.patch('tethys_apps.models.TethysApp')
    def test_get_active_app_request_bad_path(self, mock_app):
        """A request path without the apps segment yields None."""
        # Mock up for TethysApp
        mock_app.objects.get.return_value = mock.MagicMock()
        mock_request = mock.MagicMock()
        # Path does not contain apps
        mock_request.path = '/foo/bar'
        # Because 'app' not in request path, return None
        result = utilities.get_active_app(request=mock_request)
        self.assertEqual(None, result)
    @mock.patch('tethys_apps.utilities.tethys_log.warning')
    @mock.patch('tethys_apps.models.TethysApp')
    def test_get_active_app_request_exception1(self, mock_app, mock_log_warning):
        """ObjectDoesNotExist from the ORM lookup results in None and a logged warning."""
        from django.core.exceptions import ObjectDoesNotExist
        # Mock up for TethysApp to raise exception, and request
        mock_app.objects.get.side_effect = ObjectDoesNotExist
        mock_request = mock.MagicMock()
        mock_request.path = '/apps/foo/bar'
        # Result should be None due to the exception
        result = utilities.get_active_app(request=mock_request)
        self.assertEqual(None, result)
        mock_log_warning.assert_called_once_with('Could not locate app with root url "foo".')
    @mock.patch('tethys_apps.utilities.tethys_log.warning')
    @mock.patch('tethys_apps.models.TethysApp')
    def test_get_active_app_request_exception2(self, mock_app, mock_log_warning):
        """MultipleObjectsReturned from the ORM lookup results in None and a logged warning."""
        from django.core.exceptions import MultipleObjectsReturned
        # Mock up for TethysApp to raise exception, and request
        mock_app.objects.get.side_effect = MultipleObjectsReturned
        mock_request = mock.MagicMock()
        mock_request.path = '/apps/foo/bar'
        # Result should be None due to the exception
        result = utilities.get_active_app(request=mock_request)
        self.assertEqual(None, result)
        mock_log_warning.assert_called_once_with('Multiple apps found with root url "foo".')
    @mock.patch('tethys_apps.cli.cli_colors.pretty_output')
    @mock.patch('tethys_apps.models.TethysApp')
    def test_create_ps_database_setting_app_does_not_exist(self, mock_app, mock_pretty_output):
        """create_ps_database_setting aborts (False) with a message when the app does not exist."""
        from django.core.exceptions import ObjectDoesNotExist
        # Mock up for TethysApp to not exist
        mock_app.objects.get.side_effect = ObjectDoesNotExist
        mock_app_package = mock.MagicMock()
        mock_name = mock.MagicMock()
        # ObjectDoesNotExist should be thrown, and False returned
        result = utilities.create_ps_database_setting(app_package=mock_app_package, name=mock_name)
        self.assertEqual(False, result)
        po_call_args = mock_pretty_output().__enter__().write.call_args_list
        self.assertEqual(1, len(po_call_args))
        self.assertIn('A Tethys App with the name', po_call_args[0][0][0])
        self.assertIn('does not exist. Aborted.', po_call_args[0][0][0])
    @mock.patch('tethys_apps.cli.cli_colors.pretty_output')
    @mock.patch('tethys_apps.models.PersistentStoreDatabaseSetting')
    @mock.patch('tethys_apps.models.TethysApp')
    def test_create_ps_database_setting_ps_database_setting_exists(self, mock_app, mock_ps_db_setting,
                                                                   mock_pretty_output):
        """create_ps_database_setting aborts (False) when a setting with that name already exists."""
        # Mock up for TethysApp and PersistentStoreDatabaseSetting to exist
        mock_app.objects.get.return_value = mock.MagicMock()
        mock_ps_db_setting.objects.get.return_value = mock.MagicMock()
        mock_app_package = mock.MagicMock()
        mock_name = mock.MagicMock()
        # PersistentStoreDatabaseSetting should exist, and False returned
        result = utilities.create_ps_database_setting(app_package=mock_app_package, name=mock_name)
        self.assertEqual(False, result)
        po_call_args = mock_pretty_output().__enter__().write.call_args_list
        self.assertEqual(1, len(po_call_args))
        self.assertIn('A PersistentStoreDatabaseSetting with name', po_call_args[0][0][0])
        self.assertIn('already exists. Aborted.', po_call_args[0][0][0])
    @mock.patch('tethys_apps.utilities.print')
    @mock.patch('tethys_apps.cli.cli_colors.pretty_output')
    @mock.patch('tethys_apps.models.PersistentStoreDatabaseSetting')
    @mock.patch('tethys_apps.models.TethysApp')
    def test_create_ps_database_setting_ps_database_setting_exceptions(self, mock_app, mock_ps_db_setting,
                                                                       mock_pretty_output, mock_print):
        """create_ps_database_setting returns False and reports the error when save() raises."""
        from django.core.exceptions import ObjectDoesNotExist
        # Mock up for TethysApp to exist and PersistentStoreDatabaseSetting to throw exceptions
        mock_app.objects.get.return_value = mock.MagicMock()
        mock_ps_db_setting.objects.get.side_effect = ObjectDoesNotExist
        mock_ps_db_setting().save.side_effect = Exception('foo exception')
        mock_app_package = mock.MagicMock()
        mock_name = mock.MagicMock()
        # PersistentStoreDatabaseSetting should exist, and False returned
        result = utilities.create_ps_database_setting(app_package=mock_app_package, name=mock_name)
        self.assertEqual(False, result)
        mock_ps_db_setting.assert_called()
        mock_ps_db_setting().save.assert_called()
        po_call_args = mock_pretty_output().__enter__().write.call_args_list
        self.assertEqual(1, len(po_call_args))
        self.assertIn('The above error was encountered. Aborted.', po_call_args[0][0][0])
        rts_call_args = mock_print.call_args_list
        self.assertIn('foo exception', rts_call_args[0][0][0].args[0])
    @mock.patch('tethys_apps.cli.cli_colors.pretty_output')
    @mock.patch('tethys_apps.models.PersistentStoreDatabaseSetting')
    @mock.patch('tethys_apps.models.TethysApp')
    def test_create_ps_database_setting_ps_database_savess(self, mock_app, mock_ps_db_setting, mock_pretty_output):
        """Happy path: the setting is created and saved, True returned, success message written."""
        # NOTE(review): method name has a typo ("savess") — kept for interface stability.
        # Mock up for TethysApp to exist and PersistentStoreDatabaseSetting to not
        mock_app.objects.get.return_value = mock.MagicMock()
        mock_ps_db_setting.objects.get.return_value = False
        mock_ps_db_setting().save.return_value = True
        mock_app_package = mock.MagicMock()
        mock_name = mock.MagicMock()
        # True should be returned
        result = utilities.create_ps_database_setting(app_package=mock_app_package, name=mock_name)
        self.assertEqual(True, result)
        mock_ps_db_setting.assert_called()
        mock_ps_db_setting().save.assert_called()
        po_call_args = mock_pretty_output().__enter__().write.call_args_list
        self.assertEqual(1, len(po_call_args))
        self.assertIn('PersistentStoreDatabaseSetting named', po_call_args[0][0][0])
        self.assertIn('created successfully!', po_call_args[0][0][0])
    @mock.patch('tethys_apps.cli.cli_colors.pretty_output')
    @mock.patch('tethys_apps.models.TethysApp')
    def test_remove_ps_database_setting_app_not_exist(self, mock_app, mock_pretty_output):
        """remove_ps_database_setting aborts (False) with a message when the app does not exist."""
        from django.core.exceptions import ObjectDoesNotExist
        # Mock up for TethysApp to throw an exception
        mock_app.objects.get.side_effect = ObjectDoesNotExist
        mock_app_package = mock.MagicMock()
        mock_name = mock.MagicMock()
        # An exception will be thrown and false returned
        result = utilities.remove_ps_database_setting(app_package=mock_app_package, name=mock_name)
        self.assertEqual(False, result)
        po_call_args = mock_pretty_output().__enter__().write.call_args_list
        self.assertEqual(1, len(po_call_args))
        self.assertIn('A Tethys App with the name', po_call_args[0][0][0])
        self.assertIn('does not exist. Aborted.', po_call_args[0][0][0])
    @mock.patch('tethys_apps.cli.cli_colors.pretty_output')
    @mock.patch('tethys_apps.models.PersistentStoreDatabaseSetting')
    @mock.patch('tethys_apps.models.TethysApp')
    def test_remove_ps_database_setting_psdbs_not_exist(self, mock_app, mock_ps_db_setting, mock_pretty_output):
        """remove_ps_database_setting aborts (False) when the named setting does not exist for the app."""
        from django.core.exceptions import ObjectDoesNotExist
        # Mock up for TethysApp and PersistentStoreDatabaseSetting to throw an exception
        mock_app.objects.get.return_value = mock.MagicMock()
        mock_ps_db_setting.objects.get.side_effect = ObjectDoesNotExist
        mock_app_package = mock.MagicMock()
        mock_name = mock.MagicMock()
        # An exception will be thrown and false returned
        result = utilities.remove_ps_database_setting(app_package=mock_app_package, name=mock_name)
        self.assertEqual(False, result)
        po_call_args = mock_pretty_output().__enter__().write.call_args_list
        self.assertEqual(1, len(po_call_args))
        self.assertIn('An PersistentStoreDatabaseSetting with the name', po_call_args[0][0][0])
        self.assertIn(' for app ', po_call_args[0][0][0])
        self.assertIn('does not exist. Aborted.', po_call_args[0][0][0])
    @mock.patch('tethys_apps.cli.cli_colors.pretty_output')
    @mock.patch('tethys_apps.models.PersistentStoreDatabaseSetting')
    @mock.patch('tethys_apps.models.TethysApp')
    def test_remove_ps_database_setting_force_delete(self, mock_app, mock_ps_db_setting, mock_pretty_output):
        """With force=True the setting is deleted without prompting and True is returned."""
        # Mock up for TethysApp and PersistentStoreDatabaseSetting
        mock_app.objects.get.return_value = mock.MagicMock()
        mock_ps_db_setting.objects.get.return_value = mock.MagicMock()
        mock_ps_db_setting.objects.get().delete.return_value = True
        mock_app_package = mock.MagicMock()
        mock_name = mock.MagicMock()
        # Delete will be called and True returned
        result = utilities.remove_ps_database_setting(app_package=mock_app_package, name=mock_name, force=True)
        self.assertEqual(True, result)
        mock_ps_db_setting.objects.get().delete.assert_called_once()
        po_call_args = mock_pretty_output().__enter__().write.call_args_list
        self.assertEqual(1, len(po_call_args))
        self.assertIn('Successfully removed PersistentStoreDatabaseSetting with name', po_call_args[0][0][0])
    @mock.patch('tethys_apps.utilities.input')
    @mock.patch('tethys_apps.cli.cli_colors.pretty_output')
    @mock.patch('tethys_apps.models.PersistentStoreDatabaseSetting')
    @mock.patch('tethys_apps.models.TethysApp')
    def test_remove_ps_database_setting_proceed_delete(self, mock_app, mock_ps_db_setting, mock_pretty_output,
                                                       mock_input):
        """Answering 'Y' at the confirmation prompt deletes the setting and returns True."""
        # Mock up for TethysApp and PersistentStoreDatabaseSetting
        mock_app.objects.get.return_value = mock.MagicMock()
        mock_ps_db_setting.objects.get.return_value = mock.MagicMock()
        mock_ps_db_setting.objects.get().delete.return_value = True
        mock_input.side_effect = ['Y']
        mock_app_package = mock.MagicMock()
        mock_name = mock.MagicMock()
        # User confirms with 'Y', so delete is called and True returned
        result = utilities.remove_ps_database_setting(app_package=mock_app_package, name=mock_name)
        self.assertEqual(True, result)
        mock_ps_db_setting.objects.get().delete.assert_called()
        po_call_args = mock_pretty_output().__enter__().write.call_args_list
        self.assertEqual(1, len(po_call_args))
        self.assertIn('Successfully removed PersistentStoreDatabaseSetting with name', po_call_args[0][0][0])
    @mock.patch('tethys_apps.utilities.input')
    @mock.patch('tethys_apps.cli.cli_colors.pretty_output')
    @mock.patch('tethys_apps.models.PersistentStoreDatabaseSetting')
    @mock.patch('tethys_apps.models.TethysApp')
    def test_remove_ps_database_setting_do_not_proceed(self, mock_app, mock_ps_db_setting, mock_pretty_output,
                                                       mock_input):
        """User declines the confirmation prompt, so nothing is deleted and None is returned."""
        # Mock up for TethysApp and PersistentStoreDatabaseSetting
        mock_app.objects.get.return_value = mock.MagicMock()
        mock_ps_db_setting.objects.get.return_value = mock.MagicMock()
        mock_ps_db_setting.objects.get().delete.return_value = True
        # First answer is invalid ('foo'), which should re-prompt; second answer declines ('N').
        mock_input.side_effect = ['foo', 'N']
        mock_app_package = mock.MagicMock()
        mock_name = mock.MagicMock()
        # Based on the input, delete not called and None returned
        result = utilities.remove_ps_database_setting(app_package=mock_app_package, name=mock_name)
        self.assertEqual(None, result)
        mock_ps_db_setting.objects.get().delete.assert_not_called()
        po_call_args = mock_pretty_output().__enter__().write.call_args_list
        self.assertEqual(1, len(po_call_args))
        self.assertEqual('Aborted. PersistentStoreDatabaseSetting not removed.', po_call_args[0][0][0])
    @mock.patch('tethys_apps.cli.cli_colors.pretty_output')
    @mock.patch('tethys_services.models.SpatialDatasetService')
    def test_link_service_to_app_setting_spatial_dss_does_not_exist(self, mock_service, mock_pretty_output):
        """Service lookup by primary key fails, so linking aborts and returns False."""
        from django.core.exceptions import ObjectDoesNotExist
        # Mock up the SpatialDatasetService to throw ObjectDoesNotExist
        mock_service.objects.get.side_effect = ObjectDoesNotExist
        # Based on exception, False will be returned
        result = utilities.link_service_to_app_setting(service_type='spatial', service_uid='123',
                                                       app_package='foo_app', setting_type='ds_spatial',
                                                       setting_uid='456')
        self.assertEqual(False, result)
        # service_uid '123' is numeric, so the lookup is done by primary key.
        mock_service.objects.get.assert_called_once_with(pk=123)
        po_call_args = mock_pretty_output().__enter__().write.call_args_list
        self.assertEqual(1, len(po_call_args))
        self.assertIn('with ID/Name', po_call_args[0][0][0])
        self.assertIn('does not exist.', po_call_args[0][0][0])
    @mock.patch('tethys_apps.cli.cli_colors.pretty_output')
    @mock.patch('tethys_services.models.SpatialDatasetService')
    @mock.patch('tethys_apps.models.TethysApp')
    def test_link_service_to_app_setting_spatial_dss_value_error(self, mock_app, mock_service, mock_pretty_output):
        """Non-numeric service_uid falls back to lookup by name; missing app aborts with False."""
        from django.core.exceptions import ObjectDoesNotExist
        # Mock up TethysApp to throw ObjectDoesNotExist
        mock_app.objects.get.side_effect = ObjectDoesNotExist
        # Mock up the SpatialDatasetService to MagicMock
        mock_service.objects.get.return_value = mock.MagicMock()
        # Based on ValueError exception casting to int, then TethysApp ObjectDoesNotExist False will be returned
        result = utilities.link_service_to_app_setting(service_type='spatial', service_uid='foo_spatial_service',
                                                       app_package='foo_app', setting_type='ds_spatial',
                                                       setting_uid='456')
        self.assertEqual(False, result)
        # int('foo_spatial_service') raises ValueError, so the service is fetched by name, not pk.
        mock_service.objects.get.assert_called_once_with(name='foo_spatial_service')
        mock_app.objects.get.assert_called_once_with(package='foo_app')
        po_call_args = mock_pretty_output().__enter__().write.call_args_list
        self.assertEqual(1, len(po_call_args))
        self.assertIn('A Tethys App with the name', po_call_args[0][0][0])
        self.assertIn('does not exist. Aborted.', po_call_args[0][0][0])
    @mock.patch('tethys_apps.cli.cli_colors.pretty_output')
    @mock.patch('tethys_services.models.SpatialDatasetService')
    @mock.patch('tethys_apps.models.TethysApp')
    def test_link_service_to_app_setting_spatial_link_key_error(self, mock_app, mock_service, mock_pretty_output):
        """An unrecognized setting_type yields False plus a message listing the valid choices."""
        # Mock up TethysApp to MagicMock
        mock_app.objects.get.return_value = mock.MagicMock()
        # Mock up the SpatialDatasetService to MagicMock
        mock_service.objects.get.return_value = mock.MagicMock()
        # Based on KeyError for invalid setting_type False will be returned
        result = utilities.link_service_to_app_setting(service_type='spatial', service_uid='foo_spatial_service',
                                                       app_package='foo_app', setting_type='foo_invalid',
                                                       setting_uid='456')
        self.assertEqual(False, result)
        mock_service.objects.get.assert_called_once_with(name='foo_spatial_service')
        mock_app.objects.get.assert_called_once_with(package='foo_app')
        po_call_args = mock_pretty_output().__enter__().write.call_args_list
        self.assertEqual(1, len(po_call_args))
        self.assertIn('The setting_type you specified ("foo_invalid") does not exist.', po_call_args[0][0][0])
        self.assertIn('Choose from: "ps_database|ps_connection|ds_spatial"', po_call_args[0][0][0])
    @mock.patch('tethys_apps.cli.cli_colors.pretty_output')
    @mock.patch('tethys_sdk.app_settings.SpatialDatasetServiceSetting')
    @mock.patch('tethys_services.models.SpatialDatasetService')
    @mock.patch('tethys_apps.models.TethysApp')
    def test_link_service_to_app_setting_spatial_link_value_error_save(self, mock_app, mock_service, mock_setting,
                                                                       mock_pretty_output):
        """Non-numeric setting_uid falls back to name lookup; the setting is linked, saved, and True returned."""
        # Mock up TethysApp to MagicMock
        mock_app.objects.get.return_value = mock.MagicMock()
        # Mock up the SpatialDatasetService to MagicMock
        mock_service.objects.get.return_value = mock.MagicMock()
        # Mock up the SpatialDatasetServiceSetting to MagicMock
        mock_setting.objects.get.return_value = mock.MagicMock()
        mock_setting.objects.get().save.return_value = True
        # True will be returned, mocked save will be called
        # (setting_uid 'foo_456' is not an int, exercising the ValueError fallback path).
        result = utilities.link_service_to_app_setting(service_type='spatial', service_uid='foo_spatial_service',
                                                       app_package='foo_app', setting_type='ds_spatial',
                                                       setting_uid='foo_456')
        self.assertEqual(True, result)
        mock_service.objects.get.assert_called_once_with(name='foo_spatial_service')
        mock_app.objects.get.assert_called_once_with(package='foo_app')
        mock_setting.objects.get.assert_called()
        mock_setting.objects.get().save.assert_called_once()
        po_call_args = mock_pretty_output().__enter__().write.call_args_list
        self.assertEqual(1, len(po_call_args))
        self.assertIn('was successfully linked to', po_call_args[0][0][0])
    @mock.patch('tethys_apps.cli.cli_colors.pretty_output')
    @mock.patch('tethys_sdk.app_settings.SpatialDatasetServiceSetting')
    @mock.patch('tethys_services.models.SpatialDatasetService')
    @mock.patch('tethys_apps.models.TethysApp')
    def test_link_service_to_app_setting_spatial_link_does_not_exist(self, mock_app, mock_service, mock_setting,
                                                                     mock_pretty_output):
        """Setting lookup raises ObjectDoesNotExist, so linking fails and False is returned."""
        from django.core.exceptions import ObjectDoesNotExist
        # Mock up TethysApp to MagicMock
        mock_app.objects.get.return_value = mock.MagicMock()
        # Mock up the SpatialDatasetService to MagicMock
        mock_service.objects.get.return_value = mock.MagicMock()
        # Mock up the SpatialDatasetServiceSetting to raise ObjectDoesNotExist
        mock_setting.objects.get.side_effect = ObjectDoesNotExist
        # The missing setting (not a KeyError) causes False to be returned
        result = utilities.link_service_to_app_setting(service_type='spatial', service_uid='foo_spatial_service',
                                                       app_package='foo_app', setting_type='ds_spatial',
                                                       setting_uid='456')
        self.assertEqual(False, result)
        mock_service.objects.get.assert_called_once_with(name='foo_spatial_service')
        mock_app.objects.get.assert_called_once_with(package='foo_app')
        mock_setting.objects.get.assert_called()
        po_call_args = mock_pretty_output().__enter__().write.call_args_list
        self.assertEqual(1, len(po_call_args))
        self.assertIn('with ID/Name', po_call_args[0][0][0])
        self.assertIn('does not exist.', po_call_args[0][0][0])
| 51.028926
| 115
| 0.69783
| 3,160
| 24,698
| 5.128481
| 0.060443
| 0.034061
| 0.04813
| 0.052758
| 0.90158
| 0.885413
| 0.880476
| 0.85493
| 0.834814
| 0.812909
| 0
| 0.006367
| 0.211515
| 24,698
| 483
| 116
| 51.134576
| 0.825819
| 0.118714
| 0
| 0.748538
| 0
| 0
| 0.167619
| 0.116306
| 0
| 0
| 0
| 0
| 0.280702
| 1
| 0.078947
| false
| 0.005848
| 0.038012
| 0
| 0.119883
| 0.008772
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
58ed0787871a9b5fdcb02d2085fd49f6677b5373
| 10,637
|
py
|
Python
|
dbbackup/tests/test_connectors/test_postgresql.py
|
KessoumML/django-dbbackup
|
4f2878e0b007c6788b76c83aac1e9a858a4e17fa
|
[
"BSD-3-Clause"
] | null | null | null |
dbbackup/tests/test_connectors/test_postgresql.py
|
KessoumML/django-dbbackup
|
4f2878e0b007c6788b76c83aac1e9a858a4e17fa
|
[
"BSD-3-Clause"
] | null | null | null |
dbbackup/tests/test_connectors/test_postgresql.py
|
KessoumML/django-dbbackup
|
4f2878e0b007c6788b76c83aac1e9a858a4e17fa
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import unicode_literals
from io import BytesIO
from django.test import TestCase
from mock import patch
from dbbackup.db.exceptions import DumpError
from dbbackup.db.postgresql import (
PgDumpBinaryConnector,
PgDumpConnector,
PgDumpGisConnector,
)
@patch('dbbackup.db.postgresql.PgDumpConnector.run_command',
       return_value=(BytesIO(b'foo'), BytesIO()))
class PgDumpConnectorTest(TestCase):
    """Tests for PgDumpConnector: connection-URI construction, dump creation and restore.

    The class-level patch stubs ``run_command`` so no real pg_dump/psql binary
    is executed; every test receives that patch as ``mock_dump_cmd``.
    """
    def setUp(self):
        self.connector = PgDumpConnector()
        self.connector.settings['ENGINE'] = 'django.db.backends.postgresql'
        self.connector.settings['NAME'] = 'dbname'
        self.connector.settings['HOST'] = 'hostname'
    def test_user_password_uses_special_characters(self, mock_dump_cmd):
        # Special characters in USER/PASSWORD must be percent-encoded in the URI.
        self.connector.settings['PASSWORD'] = '@!'
        self.connector.settings['USER'] = '@'
        self.connector.create_dump()
        self.assertIn('postgresql://%40:%40%21@hostname/dbname', mock_dump_cmd.call_args[0][0])
    def test_create_dump(self, mock_dump_cmd):
        dump = self.connector.create_dump()
        # Test dump: the stubbed run_command yields b'foo' as stdout.
        dump_content = dump.read()
        self.assertTrue(dump_content)
        self.assertEqual(dump_content, b'foo')
        # Test cmd
        self.assertTrue(mock_dump_cmd.called)
    def test_create_dump_without_host_raises_error(self, mock_dump_cmd):
        # HOST is mandatory for the URI-based connector.
        self.connector.settings.pop('HOST', None)
        with self.assertRaises(DumpError):
            self.connector.create_dump()
    def test_password_but_no_user(self, mock_dump_cmd):
        # A password with no user is omitted from the connection URI entirely.
        self.connector.settings.pop('USER', None)
        self.connector.settings['PASSWORD'] = 'hello'
        self.connector.create_dump()
        self.assertIn('postgresql://hostname/dbname', mock_dump_cmd.call_args[0][0])
    def test_create_dump_host(self, mock_dump_cmd):
        # With
        self.connector.settings['HOST'] = 'foo'
        self.connector.create_dump()
        self.assertIn('postgresql://foo/dbname', mock_dump_cmd.call_args[0][0])
    def test_create_dump_port(self, mock_dump_cmd):
        # Without
        self.connector.settings.pop('PORT', None)
        self.connector.create_dump()
        self.assertIn('postgresql://hostname/dbname', mock_dump_cmd.call_args[0][0])
        # With
        self.connector.settings['PORT'] = 42
        self.connector.create_dump()
        self.assertIn('postgresql://hostname:42/dbname', mock_dump_cmd.call_args[0][0])
    def test_create_dump_user(self, mock_dump_cmd):
        # Without
        self.connector.settings.pop('USER', None)
        self.connector.create_dump()
        self.assertIn('postgresql://hostname/dbname', mock_dump_cmd.call_args[0][0])
        # With
        self.connector.settings['USER'] = 'foo'
        self.connector.create_dump()
        self.assertIn('postgresql://foo@hostname/dbname', mock_dump_cmd.call_args[0][0])
    def test_create_dump_exclude(self, mock_dump_cmd):
        # Without
        self.connector.create_dump()
        self.assertNotIn(' --exclude-table-data=', mock_dump_cmd.call_args[0][0])
        # With
        self.connector.exclude = ('foo',)
        self.connector.create_dump()
        self.assertIn(' --exclude-table-data=foo', mock_dump_cmd.call_args[0][0])
        # With several: each excluded table gets its own flag.
        self.connector.exclude = ('foo', 'bar')
        self.connector.create_dump()
        self.assertIn(' --exclude-table-data=foo', mock_dump_cmd.call_args[0][0])
        self.assertIn(' --exclude-table-data=bar', mock_dump_cmd.call_args[0][0])
    def test_create_dump_drop(self, mock_dump_cmd):
        # Without
        self.connector.drop = False
        self.connector.create_dump()
        self.assertNotIn(' --clean', mock_dump_cmd.call_args[0][0])
        # With
        self.connector.drop = True
        self.connector.create_dump()
        self.assertIn(' --clean', mock_dump_cmd.call_args[0][0])
    @patch('dbbackup.db.postgresql.PgDumpConnector.run_command',
           return_value=(BytesIO(), BytesIO()))
    def test_restore_dump(self, mock_dump_cmd, mock_restore_cmd):
        # NOTE(review): both the class-level and method-level patches target the
        # same run_command attribute; the method-level one is active innermost.
        dump = self.connector.create_dump()
        self.connector.restore_dump(dump)
        # Test cmd
        self.assertTrue(mock_restore_cmd.called)
    @patch('dbbackup.db.postgresql.PgDumpConnector.run_command',
           return_value=(BytesIO(), BytesIO()))
    def test_restore_dump_user(self, mock_dump_cmd, mock_restore_cmd):
        dump = self.connector.create_dump()
        # Without
        self.connector.settings.pop('USER', None)
        self.connector.restore_dump(dump)
        self.assertIn(
            'postgresql://hostname/dbname',
            mock_restore_cmd.call_args[0][0]
        )
        self.assertNotIn(' --username=', mock_restore_cmd.call_args[0][0])
        # With
        self.connector.settings['USER'] = 'foo'
        self.connector.restore_dump(dump)
        self.assertIn(
            'postgresql://foo@hostname/dbname',
            mock_restore_cmd.call_args[0][0]
        )
@patch('dbbackup.db.postgresql.PgDumpBinaryConnector.run_command',
       return_value=(BytesIO(b'foo'), BytesIO()))
class PgDumpBinaryConnectorTest(TestCase):
    """Tests for PgDumpBinaryConnector (pg_dump custom-format dumps).

    The class-level patch stubs ``run_command`` so no real binary is executed;
    every test receives that patch as ``mock_dump_cmd``.
    """
    def setUp(self):
        self.connector = PgDumpBinaryConnector()
        self.connector.settings['HOST'] = 'hostname'
        self.connector.settings['ENGINE'] = 'django.db.backends.postgresql'
        self.connector.settings['NAME'] = 'dbname'
    def test_create_dump(self, mock_dump_cmd):
        dump = self.connector.create_dump()
        # Test dump: the stubbed run_command yields b'foo' as stdout.
        dump_content = dump.read()
        self.assertTrue(dump_content)
        self.assertEqual(dump_content, b'foo')
        # Test cmd: the binary connector always requests pg_dump's custom archive format.
        self.assertTrue(mock_dump_cmd.called)
        self.assertIn('--format=custom', mock_dump_cmd.call_args[0][0])
    def test_create_dump_exclude(self, mock_dump_cmd):
        # Without
        self.connector.create_dump()
        self.assertNotIn(' --exclude-table-data=', mock_dump_cmd.call_args[0][0])
        # With
        self.connector.exclude = ('foo',)
        self.connector.create_dump()
        self.assertIn(' --exclude-table-data=foo', mock_dump_cmd.call_args[0][0])
        # With several: each excluded table gets its own flag.
        self.connector.exclude = ('foo', 'bar')
        self.connector.create_dump()
        self.assertIn(' --exclude-table-data=foo', mock_dump_cmd.call_args[0][0])
        self.assertIn(' --exclude-table-data=bar', mock_dump_cmd.call_args[0][0])
    def test_create_dump_drop(self, mock_dump_cmd):
        # Without
        self.connector.drop = False
        self.connector.create_dump()
        self.assertNotIn(' --clean', mock_dump_cmd.call_args[0][0])
        # Binary drop at restore level: even with drop=True, --clean is not
        # passed to pg_dump for custom-format dumps.
        self.connector.drop = True
        self.connector.create_dump()
        self.assertNotIn(' --clean', mock_dump_cmd.call_args[0][0])
    @patch('dbbackup.db.postgresql.PgDumpBinaryConnector.run_command',
           return_value=(BytesIO(), BytesIO()))
    def test_restore_dump(self, mock_dump_cmd, mock_restore_cmd):
        dump = self.connector.create_dump()
        self.connector.restore_dump(dump)
        # Test cmd
        self.assertTrue(mock_restore_cmd.called)
@patch('dbbackup.db.postgresql.PgDumpGisConnector.run_command',
       return_value=(BytesIO(b'foo'), BytesIO()))
class PgDumpGisConnectorTest(TestCase):
    """Tests for PgDumpGisConnector (PostGIS-enabled dumps/restores).

    Only HOST is configured in setUp; other settings fall back to the
    connector's defaults. ``run_command`` is stubbed by the class-level patch.
    """
    def setUp(self):
        self.connector = PgDumpGisConnector()
        self.connector.settings['HOST'] = 'hostname'
    @patch('dbbackup.db.postgresql.PgDumpGisConnector.run_command',
           return_value=(BytesIO(b'foo'), BytesIO()))
    def test_restore_dump(self, mock_dump_cmd, mock_restore_cmd):
        dump = self.connector.create_dump()
        # Without ADMINUSER: restore still proceeds (no PostGIS enabling step).
        self.connector.settings.pop('ADMIN_USER', None)
        self.connector.restore_dump(dump)
        self.assertTrue(mock_restore_cmd.called)
        # With
        self.connector.settings['ADMIN_USER'] = 'foo'
        self.connector.restore_dump(dump)
        self.assertTrue(mock_restore_cmd.called)
    def test_enable_postgis(self, mock_dump_cmd):
        # The extension is created via psql as the configured admin user.
        self.connector.settings['ADMIN_USER'] = 'foo'
        self.connector._enable_postgis()
        self.assertIn('"CREATE EXTENSION IF NOT EXISTS postgis;"', mock_dump_cmd.call_args[0][0])
        self.assertIn('--username=foo', mock_dump_cmd.call_args[0][0])
    def test_enable_postgis_host(self, mock_dump_cmd):
        self.connector.settings['ADMIN_USER'] = 'foo'
        # Without
        self.connector.settings.pop('HOST', None)
        self.connector._enable_postgis()
        self.assertNotIn(' --host=', mock_dump_cmd.call_args[0][0])
        # With
        self.connector.settings['HOST'] = 'foo'
        self.connector._enable_postgis()
        self.assertIn(' --host=foo', mock_dump_cmd.call_args[0][0])
    def test_enable_postgis_port(self, mock_dump_cmd):
        self.connector.settings['ADMIN_USER'] = 'foo'
        # Without
        self.connector.settings.pop('PORT', None)
        self.connector._enable_postgis()
        self.assertNotIn(' --port=', mock_dump_cmd.call_args[0][0])
        # With
        self.connector.settings['PORT'] = 42
        self.connector._enable_postgis()
        self.assertIn(' --port=42', mock_dump_cmd.call_args[0][0])
@patch('dbbackup.db.base.Popen', **{
    'return_value.wait.return_value': True,
    'return_value.poll.return_value': False,
})
class PgDumpConnectorRunCommandTest(TestCase):
    """Checks the actual subprocess invocation PgDumpConnector hands to Popen."""

    def test_run_command(self, mock_popen):
        """The executable invoked for a dump is pg_dump."""
        conn = PgDumpConnector()
        conn.settings['HOST'] = 'hostname'
        conn.create_dump()
        argv = mock_popen.call_args[0][0]
        self.assertEqual(argv[0], 'pg_dump')

    def test_run_command_with_password(self, mock_popen):
        """PASSWORD from settings is exported to the subprocess as PGPASSWORD."""
        conn = PgDumpConnector()
        conn.settings['HOST'] = 'hostname'
        conn.settings['PASSWORD'] = 'foo'
        conn.create_dump()
        self.assertEqual(mock_popen.call_args[0][0][0], 'pg_dump')
        env = mock_popen.call_args[1]['env']
        self.assertIn('PGPASSWORD', env)
        self.assertEqual('foo', env['PGPASSWORD'])

    def test_run_command_with_password_and_other(self, mock_popen):
        """Caller-supplied env vars survive alongside the injected PGPASSWORD."""
        conn = PgDumpConnector(env={'foo': 'bar'})
        conn.settings['HOST'] = 'hostname'
        conn.settings['PASSWORD'] = 'foo'
        conn.create_dump()
        self.assertEqual(mock_popen.call_args[0][0][0], 'pg_dump')
        env = mock_popen.call_args[1]['env']
        self.assertIn('foo', env)
        self.assertEqual('bar', env['foo'])
        self.assertIn('PGPASSWORD', env)
        self.assertEqual('foo', env['PGPASSWORD'])
| 39.988722
| 97
| 0.662029
| 1,300
| 10,637
| 5.174615
| 0.083077
| 0.143006
| 0.076854
| 0.04757
| 0.867251
| 0.841831
| 0.806898
| 0.774491
| 0.721718
| 0.656162
| 0
| 0.010245
| 0.201655
| 10,637
| 265
| 98
| 40.139623
| 0.781912
| 0.023691
| 0
| 0.703518
| 0
| 0
| 0.148527
| 0.092999
| 0
| 0
| 0
| 0
| 0.246231
| 1
| 0.125628
| false
| 0.060302
| 0.030151
| 0
| 0.175879
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
4516bb7b79a23c9381a92e78c56efbc8318f665d
| 1,417
|
py
|
Python
|
FFT.py
|
cqb98/fft
|
3eb5a803e8712ae2821ac91930d552972ce888c5
|
[
"BSD-2-Clause"
] | 1
|
2022-02-16T22:57:14.000Z
|
2022-02-16T22:57:14.000Z
|
FFT.py
|
cqb98/fft
|
3eb5a803e8712ae2821ac91930d552972ce888c5
|
[
"BSD-2-Clause"
] | null | null | null |
FFT.py
|
cqb98/fft
|
3eb5a803e8712ae2821ac91930d552972ce888c5
|
[
"BSD-2-Clause"
] | null | null | null |
import math
def FFT(f, power):
    """Radix-2 decimation-in-time FFT, scaled by 1/N.

    Computes F[k] = (1/N) * sum_n f[n] * exp(-2j*pi*n*k/N) with N = 2**power.
    Pairs with ``iFFT`` (which applies no 1/N factor) for an exact round trip.

    Args:
        f: sequence of at least 2**power numeric (real or complex) samples.
        power: base-2 logarithm of the transform size N.

    Returns:
        list: N spectrum values (complex for power >= 1).
    """
    n = 1 << power  # avoid shadowing the builtin `len` as the original did
    # Reorder the input into bit-reversed index order and apply the 1/N
    # scale up front so the butterfly stages below can work in place.
    F = []
    for idx in range(n):
        rev = 0
        bits = idx
        for _ in range(power):
            rev = (rev << 1) | (bits & 1)
            bits >>= 1
        F.append(f[rev] / n)
    # Twiddle factors W[k] = exp(-2j*pi*k/n) for k < n/2.
    W = [complex(math.cos(a), math.sin(a))
         for a in (-2.0 * math.pi * k / n for k in range(n >> 1))]
    # Iterative butterfly stages: merge sub-transforms of size 2**(stage-1)
    # into sub-transforms of size 2**stage.
    for stage in range(1, power + 1):
        stride = n >> stage     # number of sub-transforms at this stage
        size = 1 << stage       # length of each sub-transform
        half = size >> 1
        for j in range(half):
            coef = W[j * stride]
            top = j
            bot = j + half
            for _ in range(stride):
                a = F[top]
                b = F[bot]
                t = b * coef
                F[top] = a + t
                F[bot] = a - t
                top += size
                bot += size
    return F
def iFFT(f, power):
    """Radix-2 decimation-in-time inverse FFT, without the 1/N factor.

    Computes F[m] = sum_k f[k] * exp(+2j*pi*k*m/N) with N = 2**power.
    Because ``FFT`` already divides by N, iFFT(FFT(x, p), p) reproduces x.

    Args:
        f: sequence of at least 2**power numeric (real or complex) samples.
        power: base-2 logarithm of the transform size N.

    Returns:
        list: N reconstructed values (complex for power >= 1).
    """
    n = 1 << power  # avoid shadowing the builtin `len` as the original did
    # Reorder the input into bit-reversed index order (no scaling here —
    # FFT already applied the 1/N factor).
    F = []
    for idx in range(n):
        rev = 0
        bits = idx
        for _ in range(power):
            rev = (rev << 1) | (bits & 1)
            bits >>= 1
        F.append(f[rev])
    # Twiddle factors W[k] = exp(+2j*pi*k/n) for k < n/2 (conjugate of FFT's).
    W = [complex(math.cos(a), math.sin(a))
         for a in (2.0 * math.pi * k / n for k in range(n >> 1))]
    # Iterative butterfly stages, identical in structure to FFT.
    for stage in range(1, power + 1):
        stride = n >> stage     # number of sub-transforms at this stage
        size = 1 << stage       # length of each sub-transform
        half = size >> 1
        for j in range(half):
            coef = W[j * stride]
            top = j
            bot = j + half
            for _ in range(stride):
                a = F[top]
                b = F[bot]
                t = b * coef
                F[top] = a + t
                F[bot] = a - t
                top += size
                bot += size
    return F
| 18.402597
| 57
| 0.55187
| 265
| 1,417
| 2.928302
| 0.162264
| 0.090206
| 0.030928
| 0.056701
| 0.966495
| 0.966495
| 0.966495
| 0.966495
| 0.966495
| 0.966495
| 0
| 0.048964
| 0.250529
| 1,417
| 76
| 58
| 18.644737
| 0.681733
| 0.021171
| 0
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.031746
| false
| 0
| 0.015873
| 0
| 0.079365
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e13f5ff7e110efc221e02f528f7e899774adfda9
| 13,832
|
py
|
Python
|
src/acapela_group/base.py
|
Ge0/acapela-group
|
cb04f8ebb52accb7fbf2c703f6cc061913870ec0
|
[
"MIT"
] | 1
|
2022-01-07T19:38:21.000Z
|
2022-01-07T19:38:21.000Z
|
src/acapela_group/base.py
|
Ge0/acapela-group
|
cb04f8ebb52accb7fbf2c703f6cc061913870ec0
|
[
"MIT"
] | null | null | null |
src/acapela_group/base.py
|
Ge0/acapela-group
|
cb04f8ebb52accb7fbf2c703f6cc061913870ec0
|
[
"MIT"
] | 1
|
2022-01-07T19:36:37.000Z
|
2022-01-07T19:36:37.000Z
|
"""Base classes for Acapela Group website communication."""
import aiohttp
import re
from urllib.parse import urlparse
import requests
from .language import LANGUAGES
_MP3_REGEX = re.compile(r"var myPhpVar = '(.+?)';")
class AcapelaGroupError(Exception):
    """Base exception class for Acapela Group related errors.

    Catching this type handles every error raised by this module.
    """
class TooManyInvalidLoginAttemptsError(AcapelaGroupError):
    """Exception class thrown when locked out for too many login attempts.

    Raised by ``authenticate`` when the website reports a lockout.
    """
class InvalidCredentialsError(AcapelaGroupError):
    """Exception class for invalid credentials error.

    Raised by ``authenticate`` when the site redirects with an
    incorrect-password error.
    """
class NeedsUpdateError(AcapelaGroupError):
    """Exception class thrown when the code cannot scrap the website.

    Basically, it means that the module needs some update to keep interfacing
    with the Acapela Group website.
    """
class LanguageNotSupportedError(AcapelaGroupError):
    """Exception class thrown when the language is not supported.

    For a complete list of supported languages, see language.py.
    """
class AcapelaGroupAsync:
    """Asynchronous client class for Acapela Group website interaction.

    Use as an async context manager so the underlying aiohttp session is
    created and properly closed::

        async with AcapelaGroupAsync() as client:
            url = await client.get_mp3_url('english (usa)', 'Rhona', 'hi')
    """

    def __init__(self, base_url="http://www.acapela-group.com"):
        """Create an asynchronous AcapelaGroup session handler."""
        self._base_url = base_url
        # The aiohttp session is created lazily in __aenter__ so this class
        # can be instantiated outside of a running event loop.
        self._http_session = None

    async def __aenter__(self):
        """Instantiate an http session with AcapelaGroup."""
        self._http_session = aiohttp.ClientSession()
        return self

    async def __aexit__(self, exc_type, exc, tb):
        """Uninstantiate the http session with AcapelaGroup."""
        # Bug fix: ClientSession.close() is a coroutine. The original call
        # was not awaited, so the session was never actually closed and
        # aiohttp warns about an unclosed session/connector.
        await self._http_session.close()

    @property
    def base_url(self):
        """str: Get the base url of the instance.

        Being able to set the base url can be useful for testing purposes. The
        base url cannot be changed once the instance has been created.
        """
        return self._base_url

    def build_url(self, path=''):
        """Build a full URL with `self.base_url` and `path`.

        The result is simply `self.base_url`/`path`.

        Args:
            path (str): The path to build the URL with. Defaults to ''.

        Example:
            if the base url is 'http://www.acapela-group.com' and that the
            path is 'wp-login.php', then the method will return
            'http://www.acapela-group.com/wp-login.php'.

        Returns:
            str: Build url.
        """
        return '{}/{}'.format(self._base_url, path)

    async def authenticate(self, username: str, password: str):
        """Authenticate against the website using `login` and `password`.

        The session will use the provided credentials to scrap the website.
        It is useful mostly for retrieving sound with no background music
        set when an anonymous user listens to a text-to-speech sound.
        To obtain some credentials, you must register here:
        http://www.acapela-group.com/register/

        Args:
            username: The account's username used for registration.
            password: The account's password used for registration.

        Note:
            Be careful: Acapela Group does not use HTTPS, so your credentials
            are passing through networks as plain text. If no exception
            is raised, then the authentication succeeded.

        Raises:
            AcapelaGroupError: something went wrong while authenticating.
        """
        data = {
            'log': username,
            'pwd': password,
            'wp-submit': '',
            'redirect_to': self.build_url()  # Redirect to the index.
        }
        # Redirects are disabled so the Location header can be inspected to
        # distinguish a successful login from an incorrect-password redirect.
        response = await self._http_session.post(
            self.build_url('wp-login.php'),
            allow_redirects=False,
            data=data)
        text = await response.text()
        if text == \
                ("You have been locked out due to "
                 "too many invalid login attempts."):
            raise TooManyInvalidLoginAttemptsError(
                "Looks like you are screwed because of too many login "
                "attempts. Try with another IP maybe.")
        try:
            location = response.headers["Location"]
        except KeyError as exn:
            raise NeedsUpdateError(
                "Could not get Location header from login. "
                "The module might need an update.") from exn
        else:
            parse_result = urlparse(location)
            if parse_result.path == '/login/' and \
                    parse_result.query == 'the_error=incorrect_password':
                raise InvalidCredentialsError(
                    "Wrong couple of login/password.")
            # Go to the index to simulate the Location.
            await self._http_session.get(location)

    async def get_mp3_url(self, language, voice, text):
        """Retrieve the mp3 url associated to the settings.

        To see the list of supported languages, check the `language` module.

        Args:
            language (str): The language to use for the acapela.
            voice (str): The voice name to use for the acapela.
            text (str): the text to translate to speech.

        Raises:
            LanguageNotSupportedError: `language` is not in LANGUAGES.
            NeedsUpdateError: The module needs an update since the mp3
                url could not have been extracted, somehow.

        Returns:
            str: An HTTP url pointing to the generated mp3.
        """
        try:
            language_code = LANGUAGES[language.upper()]
        except KeyError:
            raise LanguageNotSupportedError(
                "The language {} is not supported.".format(language))
        target = self.build_url(
            "demo-tts/DemoHTML5Form_V2.php?langdemo=Powered+by+"
            "<a+href=\"http://www.acapela-vaas.com\">Acapela+Vo"
            "ice+as+a+Service</a>.+For+demo+and+evaluation+purp"
            "ose+only,+for+commercial+use+of+generated+sound+fi"
            "les+please+go+to+<a+href=\"http://www.acapela-box."
            "com\">www.acapela-box.com</a>")
        # What is that for?! The demo form posts this fixed voice table;
        # NOTE(review): key '15' is absent in the original form data.
        data = {
            '0': 'Leila',
            '1': 'Laia',
            '2': 'Eliska',
            '3': 'Mette',
            '4': 'Zoe',
            '5': 'Jasmijn',
            '6': 'Tyler',
            '7': 'Deepa',
            '8': 'Rhona',
            '9': 'Rachel',
            '10': 'Sharon',
            '11': 'Hanna',
            '12': 'Sanna',
            '13': 'Manon-be',
            '14': 'Louise',
            '16': 'Claudia',
            '17': 'Dimitris',
            '18': 'Fabiana',
            '19': 'Sakura',
            '20': 'Minji',
            '21': 'Lulu',
            '22': 'Bente',
            '23': 'Ania',
            '24': 'Marcia',
            '25': 'Celia',
            '26': 'Alyona',
            '27': 'Biera',
            '28': 'Ines',
            '29': 'Rodrigo',
            '30': 'Elin',
            '31': 'Samuel',
            '32': 'Kal',
            '33': 'Mia',
            '34': 'Ipek',
            # Here this is clearer:
            'MyLanguages': language_code,
            'MySelectedVoice': voice,
            'MyTextForTTS': text,
            'agreeterms': 'on',
            't': '1',  # Don't know about that one.
            'SendToVaaS': '',
        }
        response = await self._http_session.post(target, data=data)
        text = await response.text()
        results = _MP3_REGEX.search(text)
        if results is None:
            raise NeedsUpdateError("Could not extract mp3 url pattern. "
                                   "Check the language or the voice name.")
        return results.group(1)
class AcapelaGroup:
    """Client class for Acapela Group website interaction.

    Synchronous counterpart of AcapelaGroupAsync, built on requests.Session.
    """
    def __init__(self, base_url="http://www.acapela-group.com"):
        """Create an AcapelaGroup session handler."""
        self._base_url = base_url
        # A single requests.Session is reused for every call so login cookies
        # set by authenticate() apply to subsequent requests.
        self._http_session = requests.Session()
    @property
    def base_url(self):
        """str: Get the base url of the instance.

        Being able to set the base url can be useful for testing purposes. The
        base url cannot be changed once the instance has been created.
        """
        return self._base_url
    def build_url(self, path=''):
        """Build a full URL with `self.base_url` and `path`.

        The result is simply `self.base_url`/`path`.

        Args:
            path (str): The path to build the URL with. Defaults to ''.

        Example:
            if the base url is 'http://www.acapela-group.com' and that the
            path is 'wp-login.php', then the method will return
            'http://www.acapela-group.com/wp-login.php'.

        Returns:
            str: Build url.
        """
        return '{}/{}'.format(self._base_url, path)
    def get_mp3_url(self, language, voice, text):
        """Retrieve the mp3 url associated to the settings.

        To see the list of supported languages, check the `language` module.

        Args:
            language (str): The language to use for the acapela.
            voice (str): The voice name to use for the acapela.
            text (str): the text to translate to speech.

        Raises:
            LanguageNotSupportedError: `language` is not in LANGUAGES.
            NeedsUpdateError: The module needs an update since the mp3
                url could not have been extracted, somehow.

        Returns:
            str: An HTTP url pointing to the generated mp3.
        """
        try:
            language_code = LANGUAGES[language.upper()]
        except KeyError:
            raise LanguageNotSupportedError(
                "The language {} is not supported.".format(language))
        target = self.build_url(
            "demo-tts/DemoHTML5Form_V2.php?langdemo=Powered+by+"
            "<a+href=\"http://www.acapela-vaas.com\">Acapela+Vo"
            "ice+as+a+Service</a>.+For+demo+and+evaluation+purp"
            "ose+only,+for+commercial+use+of+generated+sound+fi"
            "les+please+go+to+<a+href=\"http://www.acapela-box."
            "com\">www.acapela-box.com</a>")
        # What is that for?! The demo form posts this fixed voice table;
        # NOTE(review): key '15' is absent in the original form data.
        data = {
            '0': 'Leila',
            '1': 'Laia',
            '2': 'Eliska',
            '3': 'Mette',
            '4': 'Zoe',
            '5': 'Jasmijn',
            '6': 'Tyler',
            '7': 'Deepa',
            '8': 'Rhona',
            '9': 'Rachel',
            '10': 'Sharon',
            '11': 'Hanna',
            '12': 'Sanna',
            '13': 'Manon-be',
            '14': 'Louise',
            '16': 'Claudia',
            '17': 'Dimitris',
            '18': 'Fabiana',
            '19': 'Sakura',
            '20': 'Minji',
            '21': 'Lulu',
            '22': 'Bente',
            '23': 'Ania',
            '24': 'Marcia',
            '25': 'Celia',
            '26': 'Alyona',
            '27': 'Biera',
            '28': 'Ines',
            '29': 'Rodrigo',
            '30': 'Elin',
            '31': 'Samuel',
            '32': 'Kal',
            '33': 'Mia',
            '34': 'Ipek',
            # Here this is clearer:
            'MyLanguages': language_code,
            'MySelectedVoice': voice,
            'MyTextForTTS': text,
            'agreeterms': 'on',
            't': '1',  # Don't know about that one.
            'SendToVaaS': '',
        }
        response = self._http_session.post(target, data=data)
        results = _MP3_REGEX.search(response.text)
        if results is None:
            raise NeedsUpdateError("Could not extract mp3 url pattern. "
                                   "Check the language or the voice name.")
        return results.group(1)
    def authenticate(self, username: str, password: str):
        """Authenticate against the website using `login` and `password`.

        The session will use the provided credentials to scrap the website.
        It is useful mostly for retrieving sound with no background music
        set when an anonymous user listens to a text-to-speech sound.
        To obtain some credentials, you must register here:
        http://www.acapela-group.com/register/

        Args:
            username: The account's username used for registration.
            password: The account's password used for registration.

        Note:
            Be careful: Acapela Group does not use HTTPS, so your credentials
            are passing through networks as plain text. If no exception
            is raised, then the authentication succeeded.

        Raises:
            AcapelaGroupError: something went wrong while authenticating.
        """
        data = {
            'log': username,
            'pwd': password,
            'wp-submit': '',
            'redirect_to': self.build_url()  # Redirect to the index.
        }
        # Redirects are disabled so the Location header can be inspected to
        # distinguish a successful login from an incorrect-password redirect.
        response = self._http_session.post(self.build_url('wp-login.php'),
                                           allow_redirects=False,
                                           data=data)
        if response.text == \
                ("You have been locked out due to "
                 "too many invalid login attempts."):
            raise TooManyInvalidLoginAttemptsError(
                "Looks like you are screwed because of too many login "
                "attempts. Try with another IP maybe.")
        try:
            location = response.headers["Location"]
        except KeyError as exn:
            raise NeedsUpdateError(
                "Could not get Location header from login. "
                "The module might need an update.") from exn
        else:
            parse_result = urlparse(location)
            if parse_result.path == '/login/' and \
                    parse_result.query == 'the_error=incorrect_password':
                raise InvalidCredentialsError(
                    "Wrong couple of login/password.")
            # Go to the index to simulate the Location.
            self._http_session.get(location)
| 33.572816
| 78
| 0.549957
| 1,532
| 13,832
| 4.902089
| 0.220627
| 0.02237
| 0.017577
| 0.02024
| 0.882956
| 0.867643
| 0.849001
| 0.829028
| 0.829028
| 0.829028
| 0
| 0.015053
| 0.342033
| 13,832
| 411
| 79
| 33.654501
| 0.810131
| 0.240312
| 0
| 0.820277
| 0
| 0
| 0.234966
| 0.048491
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036866
| false
| 0.036866
| 0.023041
| 0
| 0.124424
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e160bf685355824cbfcf5c5a49714b9cc40d698e
| 19,429
|
py
|
Python
|
sdk/python/pulumi_oci/core/drg_route_distribution_statement.py
|
EladGabay/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2021-08-17T11:14:46.000Z
|
2021-12-31T02:07:03.000Z
|
sdk/python/pulumi_oci/core/drg_route_distribution_statement.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-09-06T11:21:29.000Z
|
2021-09-06T11:21:29.000Z
|
sdk/python/pulumi_oci/core/drg_route_distribution_statement.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2021-08-24T23:31:30.000Z
|
2022-01-02T19:26:54.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['DrgRouteDistributionStatementArgs', 'DrgRouteDistributionStatement']
@pulumi.input_type
class DrgRouteDistributionStatementArgs:
def __init__(__self__, *,
action: pulumi.Input[str],
drg_route_distribution_id: pulumi.Input[str],
match_criteria: pulumi.Input['DrgRouteDistributionStatementMatchCriteriaArgs'],
priority: pulumi.Input[int]):
"""
The set of arguments for constructing a DrgRouteDistributionStatement resource.
:param pulumi.Input[str] action: Accept: import/export the route "as is"
:param pulumi.Input[str] drg_route_distribution_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the route distribution.
:param pulumi.Input['DrgRouteDistributionStatementMatchCriteriaArgs'] match_criteria: (Updatable) The action is applied only if all of the match criteria is met. If there are no match criteria in a statement, match ALL is implied.
:param pulumi.Input[int] priority: (Updatable) This field is used to specify the priority of each statement in a route distribution. The priority will be represented as a number between 0 and 65535 where a lower number indicates a higher priority. When a route is processed, statements are applied in the order defined by their priority. The first matching rule dictates the action that will be taken on the route.
"""
pulumi.set(__self__, "action", action)
pulumi.set(__self__, "drg_route_distribution_id", drg_route_distribution_id)
pulumi.set(__self__, "match_criteria", match_criteria)
pulumi.set(__self__, "priority", priority)
@property
@pulumi.getter
def action(self) -> pulumi.Input[str]:
"""
Accept: import/export the route "as is"
"""
return pulumi.get(self, "action")
@action.setter
def action(self, value: pulumi.Input[str]):
pulumi.set(self, "action", value)
@property
@pulumi.getter(name="drgRouteDistributionId")
def drg_route_distribution_id(self) -> pulumi.Input[str]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the route distribution.
"""
return pulumi.get(self, "drg_route_distribution_id")
@drg_route_distribution_id.setter
def drg_route_distribution_id(self, value: pulumi.Input[str]):
pulumi.set(self, "drg_route_distribution_id", value)
@property
@pulumi.getter(name="matchCriteria")
def match_criteria(self) -> pulumi.Input['DrgRouteDistributionStatementMatchCriteriaArgs']:
"""
(Updatable) The action is applied only if all of the match criteria is met. If there are no match criteria in a statement, match ALL is implied.
"""
return pulumi.get(self, "match_criteria")
@match_criteria.setter
def match_criteria(self, value: pulumi.Input['DrgRouteDistributionStatementMatchCriteriaArgs']):
pulumi.set(self, "match_criteria", value)
@property
@pulumi.getter
def priority(self) -> pulumi.Input[int]:
"""
(Updatable) This field is used to specify the priority of each statement in a route distribution. The priority will be represented as a number between 0 and 65535 where a lower number indicates a higher priority. When a route is processed, statements are applied in the order defined by their priority. The first matching rule dictates the action that will be taken on the route.
"""
return pulumi.get(self, "priority")
@priority.setter
def priority(self, value: pulumi.Input[int]):
pulumi.set(self, "priority", value)
@pulumi.input_type
class _DrgRouteDistributionStatementState:
def __init__(__self__, *,
action: Optional[pulumi.Input[str]] = None,
drg_route_distribution_id: Optional[pulumi.Input[str]] = None,
match_criteria: Optional[pulumi.Input['DrgRouteDistributionStatementMatchCriteriaArgs']] = None,
priority: Optional[pulumi.Input[int]] = None):
"""
Input properties used for looking up and filtering DrgRouteDistributionStatement resources.
:param pulumi.Input[str] action: Accept: import/export the route "as is"
:param pulumi.Input[str] drg_route_distribution_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the route distribution.
:param pulumi.Input['DrgRouteDistributionStatementMatchCriteriaArgs'] match_criteria: (Updatable) The action is applied only if all of the match criteria is met. If there are no match criteria in a statement, match ALL is implied.
:param pulumi.Input[int] priority: (Updatable) This field is used to specify the priority of each statement in a route distribution. The priority will be represented as a number between 0 and 65535 where a lower number indicates a higher priority. When a route is processed, statements are applied in the order defined by their priority. The first matching rule dictates the action that will be taken on the route.
"""
if action is not None:
pulumi.set(__self__, "action", action)
if drg_route_distribution_id is not None:
pulumi.set(__self__, "drg_route_distribution_id", drg_route_distribution_id)
if match_criteria is not None:
pulumi.set(__self__, "match_criteria", match_criteria)
if priority is not None:
pulumi.set(__self__, "priority", priority)
@property
@pulumi.getter
def action(self) -> Optional[pulumi.Input[str]]:
"""
Accept: import/export the route "as is"
"""
return pulumi.get(self, "action")
@action.setter
def action(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "action", value)
@property
@pulumi.getter(name="drgRouteDistributionId")
def drg_route_distribution_id(self) -> Optional[pulumi.Input[str]]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the route distribution.
"""
return pulumi.get(self, "drg_route_distribution_id")
@drg_route_distribution_id.setter
def drg_route_distribution_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "drg_route_distribution_id", value)
@property
@pulumi.getter(name="matchCriteria")
def match_criteria(self) -> Optional[pulumi.Input['DrgRouteDistributionStatementMatchCriteriaArgs']]:
"""
(Updatable) The action is applied only if all of the match criteria is met. If there are no match criteria in a statement, match ALL is implied.
"""
return pulumi.get(self, "match_criteria")
@match_criteria.setter
def match_criteria(self, value: Optional[pulumi.Input['DrgRouteDistributionStatementMatchCriteriaArgs']]):
pulumi.set(self, "match_criteria", value)
@property
@pulumi.getter
def priority(self) -> Optional[pulumi.Input[int]]:
"""
(Updatable) This field is used to specify the priority of each statement in a route distribution. The priority will be represented as a number between 0 and 65535 where a lower number indicates a higher priority. When a route is processed, statements are applied in the order defined by their priority. The first matching rule dictates the action that will be taken on the route.
"""
return pulumi.get(self, "priority")
@priority.setter
def priority(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "priority", value)
class DrgRouteDistributionStatement(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
action: Optional[pulumi.Input[str]] = None,
drg_route_distribution_id: Optional[pulumi.Input[str]] = None,
match_criteria: Optional[pulumi.Input[pulumi.InputType['DrgRouteDistributionStatementMatchCriteriaArgs']]] = None,
priority: Optional[pulumi.Input[int]] = None,
__props__=None):
"""
This resource provides the Drg Route Distribution Statement resource in Oracle Cloud Infrastructure Core service.
Adds one route distribution statement to the specified route distribution.
## Example Usage
```python
import pulumi
import pulumi_oci as oci
test_drg_route_distribution_statement = oci.core.DrgRouteDistributionStatement("testDrgRouteDistributionStatement",
drg_route_distribution_id=oci_core_drg_route_distribution["test_drg_route_distribution"]["id"],
action=var["drg_route_distribution_statement_statements_action"],
match_criteria=oci.core.DrgRouteDistributionStatementMatchCriteriaArgs(
match_type=var["drg_route_distribution_statement_statements_match_criteria_match_type"],
attachment_type=var["drg_route_distribution_statement_statements_match_criteria_attachment_type"],
drg_attachment_id=oci_core_drg_attachment["test_drg_attachment"]["id"],
),
priority=var["drg_route_distribution_statement_statements_priority"])
```
## Import
DrgRouteDistributionStatement can be imported using the `id`, e.g.
```sh
$ pulumi import oci:core/drgRouteDistributionStatement:DrgRouteDistributionStatement test_drg_route_distribution_statement "drgRouteDistributions/{drgRouteDistributionId}/statements/{id}"
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] action: Accept: import/export the route "as is"
:param pulumi.Input[str] drg_route_distribution_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the route distribution.
:param pulumi.Input[pulumi.InputType['DrgRouteDistributionStatementMatchCriteriaArgs']] match_criteria: (Updatable) The action is applied only if all of the match criteria is met. If there are no match criteria in a statement, match ALL is implied.
:param pulumi.Input[int] priority: (Updatable) This field is used to specify the priority of each statement in a route distribution. The priority will be represented as a number between 0 and 65535 where a lower number indicates a higher priority. When a route is processed, statements are applied in the order defined by their priority. The first matching rule dictates the action that will be taken on the route.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: DrgRouteDistributionStatementArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
This resource provides the Drg Route Distribution Statement resource in Oracle Cloud Infrastructure Core service.
Adds one route distribution statement to the specified route distribution.
## Example Usage
```python
import pulumi
import pulumi_oci as oci
test_drg_route_distribution_statement = oci.core.DrgRouteDistributionStatement("testDrgRouteDistributionStatement",
drg_route_distribution_id=oci_core_drg_route_distribution["test_drg_route_distribution"]["id"],
action=var["drg_route_distribution_statement_statements_action"],
match_criteria=oci.core.DrgRouteDistributionStatementMatchCriteriaArgs(
match_type=var["drg_route_distribution_statement_statements_match_criteria_match_type"],
attachment_type=var["drg_route_distribution_statement_statements_match_criteria_attachment_type"],
drg_attachment_id=oci_core_drg_attachment["test_drg_attachment"]["id"],
),
priority=var["drg_route_distribution_statement_statements_priority"])
```
## Import
DrgRouteDistributionStatement can be imported using the `id`, e.g.
```sh
$ pulumi import oci:core/drgRouteDistributionStatement:DrgRouteDistributionStatement test_drg_route_distribution_statement "drgRouteDistributions/{drgRouteDistributionId}/statements/{id}"
```
:param str resource_name: The name of the resource.
:param DrgRouteDistributionStatementArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(DrgRouteDistributionStatementArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
action: Optional[pulumi.Input[str]] = None,
drg_route_distribution_id: Optional[pulumi.Input[str]] = None,
match_criteria: Optional[pulumi.Input[pulumi.InputType['DrgRouteDistributionStatementMatchCriteriaArgs']]] = None,
priority: Optional[pulumi.Input[int]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = DrgRouteDistributionStatementArgs.__new__(DrgRouteDistributionStatementArgs)
if action is None and not opts.urn:
raise TypeError("Missing required property 'action'")
__props__.__dict__["action"] = action
if drg_route_distribution_id is None and not opts.urn:
raise TypeError("Missing required property 'drg_route_distribution_id'")
__props__.__dict__["drg_route_distribution_id"] = drg_route_distribution_id
if match_criteria is None and not opts.urn:
raise TypeError("Missing required property 'match_criteria'")
__props__.__dict__["match_criteria"] = match_criteria
if priority is None and not opts.urn:
raise TypeError("Missing required property 'priority'")
__props__.__dict__["priority"] = priority
super(DrgRouteDistributionStatement, __self__).__init__(
'oci:core/drgRouteDistributionStatement:DrgRouteDistributionStatement',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
action: Optional[pulumi.Input[str]] = None,
drg_route_distribution_id: Optional[pulumi.Input[str]] = None,
match_criteria: Optional[pulumi.Input[pulumi.InputType['DrgRouteDistributionStatementMatchCriteriaArgs']]] = None,
priority: Optional[pulumi.Input[int]] = None) -> 'DrgRouteDistributionStatement':
"""
Get an existing DrgRouteDistributionStatement resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] action: Accept: import/export the route "as is"
:param pulumi.Input[str] drg_route_distribution_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the route distribution.
:param pulumi.Input[pulumi.InputType['DrgRouteDistributionStatementMatchCriteriaArgs']] match_criteria: (Updatable) The action is applied only if all of the match criteria is met. If there are no match criteria in a statement, match ALL is implied.
:param pulumi.Input[int] priority: (Updatable) This field is used to specify the priority of each statement in a route distribution. The priority will be represented as a number between 0 and 65535 where a lower number indicates a higher priority. When a route is processed, statements are applied in the order defined by their priority. The first matching rule dictates the action that will be taken on the route.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _DrgRouteDistributionStatementState.__new__(_DrgRouteDistributionStatementState)
__props__.__dict__["action"] = action
__props__.__dict__["drg_route_distribution_id"] = drg_route_distribution_id
__props__.__dict__["match_criteria"] = match_criteria
__props__.__dict__["priority"] = priority
return DrgRouteDistributionStatement(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def action(self) -> pulumi.Output[str]:
"""
Accept: import/export the route "as is"
"""
return pulumi.get(self, "action")
@property
@pulumi.getter(name="drgRouteDistributionId")
def drg_route_distribution_id(self) -> pulumi.Output[str]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the route distribution.
"""
return pulumi.get(self, "drg_route_distribution_id")
@property
@pulumi.getter(name="matchCriteria")
def match_criteria(self) -> pulumi.Output['outputs.DrgRouteDistributionStatementMatchCriteria']:
"""
(Updatable) The action is applied only if all of the match criteria is met. If there are no match criteria in a statement, match ALL is implied.
"""
return pulumi.get(self, "match_criteria")
@property
@pulumi.getter
def priority(self) -> pulumi.Output[int]:
"""
(Updatable) This field is used to specify the priority of each statement in a route distribution. The priority will be represented as a number between 0 and 65535 where a lower number indicates a higher priority. When a route is processed, statements are applied in the order defined by their priority. The first matching rule dictates the action that will be taken on the route.
"""
return pulumi.get(self, "priority")
| 55.670487
| 422
| 0.703999
| 2,271
| 19,429
| 5.811537
| 0.090709
| 0.090165
| 0.0788
| 0.060009
| 0.828156
| 0.81285
| 0.804592
| 0.78027
| 0.758372
| 0.75716
| 0
| 0.002816
| 0.214164
| 19,429
| 348
| 423
| 55.83046
| 0.861606
| 0.473673
| 0
| 0.553073
| 1
| 0
| 0.164
| 0.101063
| 0
| 0
| 0
| 0
| 0
| 1
| 0.150838
| false
| 0.005587
| 0.039106
| 0
| 0.27933
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
beedd3794aeec15d1c9056b0efb1d0040b6e5b45
| 41
|
py
|
Python
|
skimage/measure/__init__.py
|
Teva/scikits.image
|
12669d62e699313ca0f73de1b211bf438f4efb0c
|
[
"BSD-3-Clause"
] | 3
|
2015-11-12T06:34:49.000Z
|
2017-09-22T07:47:50.000Z
|
skimage/measure/__init__.py
|
Teva/scikits.image
|
12669d62e699313ca0f73de1b211bf438f4efb0c
|
[
"BSD-3-Clause"
] | null | null | null |
skimage/measure/__init__.py
|
Teva/scikits.image
|
12669d62e699313ca0f73de1b211bf438f4efb0c
|
[
"BSD-3-Clause"
] | 8
|
2015-03-02T20:36:55.000Z
|
2021-02-18T10:37:00.000Z
|
from .find_contours import find_contours
| 20.5
| 40
| 0.878049
| 6
| 41
| 5.666667
| 0.666667
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 41
| 1
| 41
| 41
| 0.918919
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
830a264513ccc1edd1b34d9f27cfb8e63174a4eb
| 5,737
|
py
|
Python
|
proj/hog/tests/02.py
|
weijiew/cs61a-sp20
|
73322b87fe40add0350e0076ad3589fbee1f28ec
|
[
"MIT"
] | 8
|
2020-07-28T11:10:49.000Z
|
2021-05-29T15:27:17.000Z
|
03-Project-Hog/hog/hog/tests/02.py
|
ericchen12377/CS61A_LearningDoc
|
31f23962b0e2834795bf61eeb0f4884cc5da1809
|
[
"MIT"
] | null | null | null |
03-Project-Hog/hog/hog/tests/02.py
|
ericchen12377/CS61A_LearningDoc
|
31f23962b0e2834795bf61eeb0f4884cc5da1809
|
[
"MIT"
] | 1
|
2020-10-23T08:15:08.000Z
|
2020-10-23T08:15:08.000Z
|
test = {
'name': 'Question 2',
'points': 1,
'suites': [
{
'cases': [
{
'code': r"""
>>> free_bacon(4)
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(1)
2
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(20)
9
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(45)
13
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(15)
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(13)
4
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(44)
1
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(37)
10
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(40)
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(24)
9
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(46)
9
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(99)
1
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(10)
2
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(47)
6
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(67)
2
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(92)
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(9)
15
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(25)
6
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(75)
4
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(82)
5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(88)
1
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(72)
8
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(41)
7
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(15)
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(42)
4
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(93)
8
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(99)
1
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(73)
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(4)
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(83)
8
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(34)
2
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(4)
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(53)
4
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(19)
7
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(1)
2
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> free_bacon(85)
6
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': r"""
>>> from hog import *
""",
'teardown': '',
'type': 'doctest'
}
]
}
| 18.809836
| 28
| 0.264947
| 378
| 5,737
| 3.925926
| 0.156085
| 0.121294
| 0.218329
| 0.339623
| 0.903639
| 0.88814
| 0.88814
| 0.88814
| 0.815364
| 0.412399
| 0
| 0.04325
| 0.568764
| 5,737
| 304
| 29
| 18.871711
| 0.556589
| 0
| 0
| 0.506579
| 0
| 0
| 0.442914
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.003289
| 0
| 0.003289
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8340cb90317cb69103bdbfa9ad71dcd02406276b
| 106
|
py
|
Python
|
contests_atcoder/abc188/abc188_b_zip.py
|
takelifetime/competitive-programming
|
e7cf8ef923ccefad39a1727ca94c610d650fcb76
|
[
"BSD-2-Clause"
] | null | null | null |
contests_atcoder/abc188/abc188_b_zip.py
|
takelifetime/competitive-programming
|
e7cf8ef923ccefad39a1727ca94c610d650fcb76
|
[
"BSD-2-Clause"
] | 1
|
2021-01-02T06:36:51.000Z
|
2021-01-02T06:36:51.000Z
|
contests_atcoder/abc188/abc188_b_zip.py
|
takelifetime/competitive-programming
|
e7cf8ef923ccefad39a1727ca94c610d650fcb76
|
[
"BSD-2-Clause"
] | null | null | null |
input();print("No"if sum(x*y for x,y in zip(map(int,input().split()),map(int,input().split())))else "Yes")
| 106
| 106
| 0.641509
| 22
| 106
| 3.090909
| 0.681818
| 0.058824
| 0.323529
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056604
| 106
| 1
| 106
| 106
| 0.68
| 0
| 0
| 0
| 0
| 0
| 0.046729
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
55fef8e947bb15d0905ed98b27908901184f17b4
| 81
|
py
|
Python
|
tests/test_default.py
|
jonringer/pyscreenshot
|
44cefded198b26fd162ab12c9e947704ec9dced0
|
[
"BSD-2-Clause"
] | 416
|
2015-01-01T00:41:31.000Z
|
2022-03-31T10:15:53.000Z
|
tests/test_default.py
|
jonringer/pyscreenshot
|
44cefded198b26fd162ab12c9e947704ec9dced0
|
[
"BSD-2-Clause"
] | 72
|
2015-02-23T20:12:17.000Z
|
2022-03-02T21:23:17.000Z
|
tests/test_default.py
|
jonringer/pyscreenshot
|
44cefded198b26fd162ab12c9e947704ec9dced0
|
[
"BSD-2-Clause"
] | 88
|
2015-03-04T03:29:43.000Z
|
2021-10-04T06:37:00.000Z
|
from bt import backend_to_check
def test_default():
backend_to_check(None)
| 13.5
| 31
| 0.777778
| 13
| 81
| 4.461538
| 0.769231
| 0.310345
| 0.482759
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160494
| 81
| 5
| 32
| 16.2
| 0.852941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
362c90282228857a618bce4f430e994fd979aba8
| 183
|
py
|
Python
|
zipkin/binding/requests/impl.py
|
MoiTux/python-zipkin
|
4c69a28a43176fe24a2aeac932c153258ff7e60a
|
[
"Apache-2.0"
] | null | null | null |
zipkin/binding/requests/impl.py
|
MoiTux/python-zipkin
|
4c69a28a43176fe24a2aeac932c153258ff7e60a
|
[
"Apache-2.0"
] | null | null | null |
zipkin/binding/requests/impl.py
|
MoiTux/python-zipkin
|
4c69a28a43176fe24a2aeac932c153258ff7e60a
|
[
"Apache-2.0"
] | null | null | null |
import requests.sessions
from . import events
def bind():
old_init = requests.sessions.Session.__init__
requests.sessions.Session.__init__ = events.session_init(old_init)
| 18.3
| 70
| 0.770492
| 23
| 183
| 5.652174
| 0.434783
| 0.369231
| 0.307692
| 0.415385
| 0.446154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142077
| 183
| 9
| 71
| 20.333333
| 0.828025
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
36bb689d1d41ae5fd3b55d85a35430be12ae636f
| 6,407
|
py
|
Python
|
loldib/getratings/models/NA/na_taric/na_taric_sup.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_taric/na_taric_sup.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_taric/na_taric_sup.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
class NA_Taric_Sup_Aatrox(Ratings):
pass
class NA_Taric_Sup_Ahri(Ratings):
pass
class NA_Taric_Sup_Akali(Ratings):
pass
class NA_Taric_Sup_Alistar(Ratings):
pass
class NA_Taric_Sup_Amumu(Ratings):
pass
class NA_Taric_Sup_Anivia(Ratings):
pass
class NA_Taric_Sup_Annie(Ratings):
pass
class NA_Taric_Sup_Ashe(Ratings):
pass
class NA_Taric_Sup_AurelionSol(Ratings):
pass
class NA_Taric_Sup_Azir(Ratings):
pass
class NA_Taric_Sup_Bard(Ratings):
pass
class NA_Taric_Sup_Blitzcrank(Ratings):
pass
class NA_Taric_Sup_Brand(Ratings):
pass
class NA_Taric_Sup_Braum(Ratings):
pass
class NA_Taric_Sup_Caitlyn(Ratings):
pass
class NA_Taric_Sup_Camille(Ratings):
pass
class NA_Taric_Sup_Cassiopeia(Ratings):
pass
class NA_Taric_Sup_Chogath(Ratings):
pass
class NA_Taric_Sup_Corki(Ratings):
pass
class NA_Taric_Sup_Darius(Ratings):
pass
class NA_Taric_Sup_Diana(Ratings):
pass
class NA_Taric_Sup_Draven(Ratings):
pass
class NA_Taric_Sup_DrMundo(Ratings):
pass
class NA_Taric_Sup_Ekko(Ratings):
pass
class NA_Taric_Sup_Elise(Ratings):
pass
class NA_Taric_Sup_Evelynn(Ratings):
pass
class NA_Taric_Sup_Ezreal(Ratings):
pass
class NA_Taric_Sup_Fiddlesticks(Ratings):
pass
class NA_Taric_Sup_Fiora(Ratings):
pass
class NA_Taric_Sup_Fizz(Ratings):
pass
class NA_Taric_Sup_Galio(Ratings):
pass
class NA_Taric_Sup_Gangplank(Ratings):
pass
class NA_Taric_Sup_Garen(Ratings):
pass
class NA_Taric_Sup_Gnar(Ratings):
pass
class NA_Taric_Sup_Gragas(Ratings):
pass
class NA_Taric_Sup_Graves(Ratings):
pass
class NA_Taric_Sup_Hecarim(Ratings):
pass
class NA_Taric_Sup_Heimerdinger(Ratings):
pass
class NA_Taric_Sup_Illaoi(Ratings):
pass
class NA_Taric_Sup_Irelia(Ratings):
pass
class NA_Taric_Sup_Ivern(Ratings):
pass
class NA_Taric_Sup_Janna(Ratings):
pass
class NA_Taric_Sup_JarvanIV(Ratings):
pass
class NA_Taric_Sup_Jax(Ratings):
pass
class NA_Taric_Sup_Jayce(Ratings):
pass
class NA_Taric_Sup_Jhin(Ratings):
pass
class NA_Taric_Sup_Jinx(Ratings):
pass
class NA_Taric_Sup_Kalista(Ratings):
pass
class NA_Taric_Sup_Karma(Ratings):
pass
class NA_Taric_Sup_Karthus(Ratings):
pass
class NA_Taric_Sup_Kassadin(Ratings):
pass
class NA_Taric_Sup_Katarina(Ratings):
pass
class NA_Taric_Sup_Kayle(Ratings):
pass
class NA_Taric_Sup_Kayn(Ratings):
pass
class NA_Taric_Sup_Kennen(Ratings):
pass
class NA_Taric_Sup_Khazix(Ratings):
pass
class NA_Taric_Sup_Kindred(Ratings):
pass
class NA_Taric_Sup_Kled(Ratings):
pass
class NA_Taric_Sup_KogMaw(Ratings):
pass
class NA_Taric_Sup_Leblanc(Ratings):
pass
class NA_Taric_Sup_LeeSin(Ratings):
pass
class NA_Taric_Sup_Leona(Ratings):
pass
class NA_Taric_Sup_Lissandra(Ratings):
pass
class NA_Taric_Sup_Lucian(Ratings):
pass
class NA_Taric_Sup_Lulu(Ratings):
pass
class NA_Taric_Sup_Lux(Ratings):
pass
class NA_Taric_Sup_Malphite(Ratings):
pass
class NA_Taric_Sup_Malzahar(Ratings):
pass
class NA_Taric_Sup_Maokai(Ratings):
pass
class NA_Taric_Sup_MasterYi(Ratings):
pass
class NA_Taric_Sup_MissFortune(Ratings):
pass
class NA_Taric_Sup_MonkeyKing(Ratings):
pass
class NA_Taric_Sup_Mordekaiser(Ratings):
pass
class NA_Taric_Sup_Morgana(Ratings):
pass
class NA_Taric_Sup_Nami(Ratings):
pass
class NA_Taric_Sup_Nasus(Ratings):
pass
class NA_Taric_Sup_Nautilus(Ratings):
pass
class NA_Taric_Sup_Nidalee(Ratings):
pass
class NA_Taric_Sup_Nocturne(Ratings):
pass
class NA_Taric_Sup_Nunu(Ratings):
pass
class NA_Taric_Sup_Olaf(Ratings):
pass
class NA_Taric_Sup_Orianna(Ratings):
pass
class NA_Taric_Sup_Ornn(Ratings):
pass
class NA_Taric_Sup_Pantheon(Ratings):
pass
class NA_Taric_Sup_Poppy(Ratings):
pass
class NA_Taric_Sup_Quinn(Ratings):
pass
class NA_Taric_Sup_Rakan(Ratings):
pass
class NA_Taric_Sup_Rammus(Ratings):
pass
# Presumably auto-generated matchup placeholder classes: one empty Ratings
# subclass per "NA / Taric / Support vs. <champion>" pairing. No behavior is
# added by any of them — each subclass differs only by name.
# TODO(review): confirm these come from a code generator before hand-editing.
class NA_Taric_Sup_RekSai(Ratings):
    pass
class NA_Taric_Sup_Renekton(Ratings):
    pass
class NA_Taric_Sup_Rengar(Ratings):
    pass
class NA_Taric_Sup_Riven(Ratings):
    pass
class NA_Taric_Sup_Rumble(Ratings):
    pass
class NA_Taric_Sup_Ryze(Ratings):
    pass
class NA_Taric_Sup_Sejuani(Ratings):
    pass
class NA_Taric_Sup_Shaco(Ratings):
    pass
class NA_Taric_Sup_Shen(Ratings):
    pass
class NA_Taric_Sup_Shyvana(Ratings):
    pass
class NA_Taric_Sup_Singed(Ratings):
    pass
class NA_Taric_Sup_Sion(Ratings):
    pass
class NA_Taric_Sup_Sivir(Ratings):
    pass
class NA_Taric_Sup_Skarner(Ratings):
    pass
class NA_Taric_Sup_Sona(Ratings):
    pass
class NA_Taric_Sup_Soraka(Ratings):
    pass
class NA_Taric_Sup_Swain(Ratings):
    pass
class NA_Taric_Sup_Syndra(Ratings):
    pass
class NA_Taric_Sup_TahmKench(Ratings):
    pass
class NA_Taric_Sup_Taliyah(Ratings):
    pass
class NA_Taric_Sup_Talon(Ratings):
    pass
class NA_Taric_Sup_Taric(Ratings):
    pass
class NA_Taric_Sup_Teemo(Ratings):
    pass
class NA_Taric_Sup_Thresh(Ratings):
    pass
class NA_Taric_Sup_Tristana(Ratings):
    pass
class NA_Taric_Sup_Trundle(Ratings):
    pass
class NA_Taric_Sup_Tryndamere(Ratings):
    pass
class NA_Taric_Sup_TwistedFate(Ratings):
    pass
class NA_Taric_Sup_Twitch(Ratings):
    pass
class NA_Taric_Sup_Udyr(Ratings):
    pass
class NA_Taric_Sup_Urgot(Ratings):
    pass
class NA_Taric_Sup_Varus(Ratings):
    pass
class NA_Taric_Sup_Vayne(Ratings):
    pass
class NA_Taric_Sup_Veigar(Ratings):
    pass
class NA_Taric_Sup_Velkoz(Ratings):
    pass
class NA_Taric_Sup_Vi(Ratings):
    pass
class NA_Taric_Sup_Viktor(Ratings):
    pass
class NA_Taric_Sup_Vladimir(Ratings):
    pass
class NA_Taric_Sup_Volibear(Ratings):
    pass
class NA_Taric_Sup_Warwick(Ratings):
    pass
class NA_Taric_Sup_Xayah(Ratings):
    pass
class NA_Taric_Sup_Xerath(Ratings):
    pass
class NA_Taric_Sup_XinZhao(Ratings):
    pass
class NA_Taric_Sup_Yasuo(Ratings):
    pass
class NA_Taric_Sup_Yorick(Ratings):
    pass
class NA_Taric_Sup_Zac(Ratings):
    pass
class NA_Taric_Sup_Zed(Ratings):
    pass
class NA_Taric_Sup_Ziggs(Ratings):
    pass
class NA_Taric_Sup_Zilean(Ratings):
    pass
class NA_Taric_Sup_Zyra(Ratings):
    pass
| 15.364508
| 46
| 0.761667
| 972
| 6,407
| 4.59465
| 0.151235
| 0.216301
| 0.370802
| 0.463502
| 0.797582
| 0.797582
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173404
| 6,407
| 416
| 47
| 15.401442
| 0.843278
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
36d10b5b0a7b1e1f6221f172f44836f00e1a40f9
| 20,887
|
py
|
Python
|
nidaqmx/_task_modules/read_functions.py
|
stafak/nidaqmx-python
|
f354d7971b21074c120c6f298dbbf4a5e0e4f4f4
|
[
"MIT"
] | 252
|
2017-03-22T02:43:16.000Z
|
2022-03-27T14:44:44.000Z
|
nidaqmx/_task_modules/read_functions.py
|
stafak/nidaqmx-python
|
f354d7971b21074c120c6f298dbbf4a5e0e4f4f4
|
[
"MIT"
] | 133
|
2017-03-21T20:57:59.000Z
|
2022-03-31T16:08:12.000Z
|
nidaqmx/_task_modules/read_functions.py
|
stafak/nidaqmx-python
|
f354d7971b21074c120c6f298dbbf4a5e0e4f4f4
|
[
"MIT"
] | 124
|
2017-04-01T18:35:24.000Z
|
2022-03-25T06:30:00.000Z
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import ctypes
import numpy
from nidaqmx._lib import lib_importer, wrapped_ndpointer, c_bool32
from nidaqmx.constants import FillMode
from nidaqmx.errors import check_for_error
from nidaqmx.types import CtrFreq, CtrTick, CtrTime
def _read_analog_f_64(
        task_handle, read_array, num_samps_per_chan, timeout,
        fill_mode=FillMode.GROUP_BY_CHANNEL):
    """Read 64-bit float analog samples from the task into *read_array*.

    Returns the number of samples read per channel.
    """
    cfunc = lib_importer.windll.DAQmxReadAnalogF64
    # Publish the ctypes prototype once; double-checked under the lock so
    # concurrent first callers configure it exactly one time.
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes.c_int, ctypes.c_double,
                    c_bool32,
                    wrapped_ndpointer(dtype=numpy.float64, flags=('C', 'W')),
                    ctypes.c_uint, ctypes.POINTER(ctypes.c_int),
                    ctypes.POINTER(c_bool32)]
    samps_read = ctypes.c_int()
    check_for_error(cfunc(
        task_handle, num_samps_per_chan, timeout, fill_mode.value,
        read_array, numpy.prod(read_array.shape),
        ctypes.byref(samps_read), None))
    return samps_read.value
def _read_analog_scalar_f_64(task_handle, timeout):
    """Read a single analog sample as a Python float."""
    cfunc = lib_importer.windll.DAQmxReadAnalogScalarF64
    # Configure the ctypes prototype lazily, once, under the lock.
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes.c_double,
                    ctypes.POINTER(ctypes.c_double),
                    ctypes.POINTER(c_bool32)]
    sample = ctypes.c_double()
    check_for_error(
        cfunc(task_handle, timeout, ctypes.byref(sample), None))
    return sample.value
def _read_binary_i_16(
        task_handle, read_array, num_samps_per_chan, timeout,
        fill_mode=FillMode.GROUP_BY_CHANNEL):
    """Read signed 16-bit binary samples into *read_array*.

    Returns the number of samples read per channel.
    """
    cfunc = lib_importer.windll.DAQmxReadBinaryI16
    # Configure the ctypes prototype once, double-checked under the lock.
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes.c_int, ctypes.c_double,
                    ctypes.c_int,
                    wrapped_ndpointer(dtype=numpy.int16, flags=('C', 'W')),
                    ctypes.c_uint, ctypes.POINTER(ctypes.c_int),
                    ctypes.POINTER(c_bool32)]
    samps_read = ctypes.c_int()
    check_for_error(cfunc(
        task_handle, num_samps_per_chan, timeout, fill_mode.value,
        read_array, numpy.prod(read_array.shape),
        ctypes.byref(samps_read), None))
    return samps_read.value
def _read_binary_u_16(
        task_handle, read_array, num_samps_per_chan, timeout,
        fill_mode=FillMode.GROUP_BY_CHANNEL):
    """Read unsigned 16-bit binary samples into *read_array*.

    Returns the number of samples read per channel.
    """
    cfunc = lib_importer.windll.DAQmxReadBinaryU16
    # Configure the ctypes prototype once, double-checked under the lock.
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes.c_int, ctypes.c_double,
                    ctypes.c_int,
                    wrapped_ndpointer(dtype=numpy.uint16, flags=('C', 'W')),
                    ctypes.c_uint, ctypes.POINTER(ctypes.c_int),
                    ctypes.POINTER(c_bool32)]
    samps_read = ctypes.c_int()
    check_for_error(cfunc(
        task_handle, num_samps_per_chan, timeout, fill_mode.value,
        read_array, numpy.prod(read_array.shape),
        ctypes.byref(samps_read), None))
    return samps_read.value
def _read_binary_i_32(
        task_handle, read_array, num_samps_per_chan, timeout,
        fill_mode=FillMode.GROUP_BY_CHANNEL):
    """Read signed 32-bit binary samples into *read_array*.

    Returns the number of samples read per channel.
    """
    cfunc = lib_importer.windll.DAQmxReadBinaryI32
    # Configure the ctypes prototype once, double-checked under the lock.
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes.c_int, ctypes.c_double,
                    ctypes.c_int,
                    wrapped_ndpointer(dtype=numpy.int32, flags=('C', 'W')),
                    ctypes.c_uint, ctypes.POINTER(ctypes.c_int),
                    ctypes.POINTER(c_bool32)]
    samps_read = ctypes.c_int()
    check_for_error(cfunc(
        task_handle, num_samps_per_chan, timeout, fill_mode.value,
        read_array, numpy.prod(read_array.shape),
        ctypes.byref(samps_read), None))
    return samps_read.value
def _read_binary_u_32(
        task_handle, read_array, num_samps_per_chan, timeout,
        fill_mode=FillMode.GROUP_BY_CHANNEL):
    """Read unsigned 32-bit binary samples into *read_array*.

    Returns the number of samples read per channel.
    """
    cfunc = lib_importer.windll.DAQmxReadBinaryU32
    # Configure the ctypes prototype once, double-checked under the lock.
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes.c_int, ctypes.c_double,
                    ctypes.c_int,
                    wrapped_ndpointer(dtype=numpy.uint32, flags=('C', 'W')),
                    ctypes.c_uint, ctypes.POINTER(ctypes.c_int),
                    ctypes.POINTER(c_bool32)]
    samps_read = ctypes.c_int()
    check_for_error(cfunc(
        task_handle, num_samps_per_chan, timeout, fill_mode.value,
        read_array, numpy.prod(read_array.shape),
        ctypes.byref(samps_read), None))
    return samps_read.value
def _read_digital_u_8(
        task_handle, read_array, num_samps_per_chan, timeout,
        fill_mode=FillMode.GROUP_BY_CHANNEL):
    """Read digital samples as unsigned 8-bit values into *read_array*.

    Returns the number of samples read per channel.
    """
    cfunc = lib_importer.windll.DAQmxReadDigitalU8
    # Configure the ctypes prototype once, double-checked under the lock.
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes.c_int, ctypes.c_double,
                    ctypes.c_int,
                    wrapped_ndpointer(dtype=numpy.uint8, flags=('C', 'W')),
                    ctypes.c_uint, ctypes.POINTER(ctypes.c_int),
                    ctypes.POINTER(c_bool32)]
    samps_read = ctypes.c_int()
    check_for_error(cfunc(
        task_handle, num_samps_per_chan, timeout, fill_mode.value,
        read_array, numpy.prod(read_array.shape),
        ctypes.byref(samps_read), None))
    return samps_read.value
def _read_digital_u_16(
        task_handle, read_array, num_samps_per_chan, timeout,
        fill_mode=FillMode.GROUP_BY_CHANNEL):
    """Read digital samples as unsigned 16-bit values into *read_array*.

    Returns the number of samples read per channel.
    """
    cfunc = lib_importer.windll.DAQmxReadDigitalU16
    # Configure the ctypes prototype once, double-checked under the lock.
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes.c_int, ctypes.c_double,
                    ctypes.c_int,
                    wrapped_ndpointer(dtype=numpy.uint16, flags=('C', 'W')),
                    ctypes.c_uint, ctypes.POINTER(ctypes.c_int),
                    ctypes.POINTER(c_bool32)]
    samps_read = ctypes.c_int()
    check_for_error(cfunc(
        task_handle, num_samps_per_chan, timeout, fill_mode.value,
        read_array, numpy.prod(read_array.shape),
        ctypes.byref(samps_read), None))
    return samps_read.value
def _read_digital_u_32(
        task_handle, read_array, num_samps_per_chan, timeout,
        fill_mode=FillMode.GROUP_BY_CHANNEL):
    """Read digital samples as unsigned 32-bit values into *read_array*.

    Returns the number of samples read per channel.
    """
    cfunc = lib_importer.windll.DAQmxReadDigitalU32
    # Configure the ctypes prototype once, double-checked under the lock.
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes.c_int, ctypes.c_double,
                    ctypes.c_int,
                    wrapped_ndpointer(dtype=numpy.uint32, flags=('C', 'W')),
                    ctypes.c_uint, ctypes.POINTER(ctypes.c_int),
                    ctypes.POINTER(c_bool32)]
    samps_read = ctypes.c_int()
    check_for_error(cfunc(
        task_handle, num_samps_per_chan, timeout, fill_mode.value,
        read_array, numpy.prod(read_array.shape),
        ctypes.byref(samps_read), None))
    return samps_read.value
def _read_digital_scalar_u_32(task_handle, timeout):
    """Read one digital sample as an unsigned 32-bit integer."""
    cfunc = lib_importer.windll.DAQmxReadDigitalScalarU32
    # Configure the ctypes prototype lazily, once, under the lock.
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes.c_double,
                    ctypes.POINTER(ctypes.c_uint), ctypes.POINTER(c_bool32)]
    sample = ctypes.c_uint()
    check_for_error(
        cfunc(task_handle, timeout, ctypes.byref(sample), None))
    return sample.value
def _read_digital_lines(
        task_handle, read_array, num_samps_per_chan, timeout,
        fill_mode=FillMode.GROUP_BY_CHANNEL):
    """Read boolean digital-line samples into *read_array*.

    Returns a namedtuple ``(samps_per_chan_read, num_bytes_per_samp)`` with
    the per-channel sample count and the byte width of each sample.
    """
    cfunc = lib_importer.windll.DAQmxReadDigitalLines
    # Configure the ctypes prototype once, double-checked under the lock.
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes.c_int, ctypes.c_double,
                    ctypes.c_int,
                    # ``numpy.bool`` was deprecated in NumPy 1.20 and removed
                    # in 1.24; the builtin ``bool`` resolves to the same
                    # one-byte boolean dtype and works on all NumPy versions.
                    wrapped_ndpointer(dtype=bool, flags=('C', 'W')),
                    ctypes.c_uint, ctypes.POINTER(ctypes.c_int),
                    ctypes.POINTER(ctypes.c_int), ctypes.POINTER(c_bool32)]

    samps_per_chan_read = ctypes.c_int()
    num_bytes_per_samp = ctypes.c_int()
    error_code = cfunc(
        task_handle, num_samps_per_chan, timeout, fill_mode.value,
        read_array, numpy.prod(read_array.shape),
        ctypes.byref(samps_per_chan_read),
        ctypes.byref(num_bytes_per_samp), None)
    check_for_error(error_code)

    ReadDigitalLinesReturnData = (
        collections.namedtuple(
            'ReadDigitalLinesReturnData',
            ['samps_per_chan_read', 'num_bytes_per_samp']))

    return ReadDigitalLinesReturnData(
        samps_per_chan_read.value, num_bytes_per_samp.value)
def _read_counter_f_64(task_handle, read_array, num_samps_per_chan, timeout):
    """Read 64-bit float counter samples into *read_array*.

    Returns the number of samples read per channel.
    """
    cfunc = lib_importer.windll.DAQmxReadCounterF64
    # Configure the ctypes prototype once, double-checked under the lock.
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes.c_int, ctypes.c_double,
                    wrapped_ndpointer(dtype=numpy.float64, flags=('C', 'W')),
                    ctypes.c_uint, ctypes.POINTER(ctypes.c_int),
                    ctypes.POINTER(c_bool32)]
    samps_read = ctypes.c_int()
    check_for_error(cfunc(
        task_handle, num_samps_per_chan, timeout,
        read_array, numpy.prod(read_array.shape),
        ctypes.byref(samps_read), None))
    return samps_read.value
def _read_counter_u_32(task_handle, read_array, num_samps_per_chan, timeout):
    """Read unsigned 32-bit counter samples into *read_array*.

    Returns the number of samples read per channel.
    """
    cfunc = lib_importer.windll.DAQmxReadCounterU32
    # Configure the ctypes prototype once, double-checked under the lock.
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes.c_int, ctypes.c_double,
                    wrapped_ndpointer(dtype=numpy.uint32, flags=('C', 'W')),
                    ctypes.c_uint, ctypes.POINTER(ctypes.c_int),
                    ctypes.POINTER(c_bool32)]
    samps_read = ctypes.c_int()
    check_for_error(cfunc(
        task_handle, num_samps_per_chan, timeout,
        read_array, numpy.prod(read_array.shape),
        ctypes.byref(samps_read), None))
    return samps_read.value
def _read_counter_f_64_ex(
        task_handle, read_array, num_samps_per_chan, timeout,
        fill_mode=FillMode.GROUP_BY_CHANNEL):
    """Read 64-bit float counter samples (extended form with fill mode).

    Returns the number of samples read per channel.
    """
    cfunc = lib_importer.windll.DAQmxReadCounterF64Ex
    # Configure the ctypes prototype once, double-checked under the lock.
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes.c_int, ctypes.c_double,
                    ctypes.c_int,
                    wrapped_ndpointer(dtype=numpy.float64, flags=('C', 'W')),
                    ctypes.c_uint, ctypes.POINTER(ctypes.c_int),
                    ctypes.POINTER(c_bool32)]
    samps_read = ctypes.c_int()
    check_for_error(cfunc(
        task_handle, num_samps_per_chan, timeout, fill_mode.value,
        read_array, numpy.prod(read_array.shape),
        ctypes.byref(samps_read), None))
    return samps_read.value
def _read_counter_u_32_ex(
        task_handle, read_array, num_samps_per_chan, timeout,
        fill_mode=FillMode.GROUP_BY_CHANNEL):
    """Read unsigned 32-bit counter samples (extended form with fill mode).

    Returns the number of samples read per channel.
    """
    cfunc = lib_importer.windll.DAQmxReadCounterU32Ex
    # Configure the ctypes prototype once, double-checked under the lock.
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes.c_int, ctypes.c_double,
                    ctypes.c_int,
                    wrapped_ndpointer(dtype=numpy.uint32, flags=('C', 'W')),
                    ctypes.c_uint, ctypes.POINTER(ctypes.c_int),
                    ctypes.POINTER(c_bool32)]
    samps_read = ctypes.c_int()
    check_for_error(cfunc(
        task_handle, num_samps_per_chan, timeout, fill_mode.value,
        read_array, numpy.prod(read_array.shape),
        ctypes.byref(samps_read), None))
    return samps_read.value
def _read_counter_scalar_f_64(task_handle, timeout):
    """Read one counter sample as a Python float."""
    cfunc = lib_importer.windll.DAQmxReadCounterScalarF64
    # Configure the ctypes prototype lazily, once, under the lock.
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes.c_double,
                    ctypes.POINTER(ctypes.c_double), ctypes.POINTER(c_bool32)]
    sample = ctypes.c_double()
    check_for_error(
        cfunc(task_handle, timeout, ctypes.byref(sample), None))
    return sample.value
def _read_counter_scalar_u_32(task_handle, timeout):
    """Read one counter sample as an unsigned 32-bit integer."""
    cfunc = lib_importer.windll.DAQmxReadCounterScalarU32
    # Configure the ctypes prototype lazily, once, under the lock.
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes.c_double,
                    ctypes.POINTER(ctypes.c_uint), ctypes.POINTER(c_bool32)]
    sample = ctypes.c_uint()
    check_for_error(
        cfunc(task_handle, timeout, ctypes.byref(sample), None))
    return sample.value
def _read_ctr_freq(
        task_handle, freq, duty_cycle, num_samps_per_chan, timeout,
        interleaved=FillMode.GROUP_BY_CHANNEL):
    """Read counter frequency / duty-cycle sample pairs into the two arrays.

    Returns the number of samples read per channel.
    """
    cfunc = lib_importer.windll.DAQmxReadCtrFreq
    # Configure the ctypes prototype once, double-checked under the lock.
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes.c_int, ctypes.c_double,
                    ctypes.c_int,
                    wrapped_ndpointer(dtype=numpy.float64, flags=('C', 'W')),
                    wrapped_ndpointer(dtype=numpy.float64, flags=('C', 'W')),
                    ctypes.c_uint, ctypes.POINTER(ctypes.c_int),
                    ctypes.POINTER(c_bool32)]
    samps_read = ctypes.c_int()
    check_for_error(cfunc(
        task_handle, num_samps_per_chan, timeout, interleaved.value,
        freq, duty_cycle, numpy.prod(freq.shape),
        ctypes.byref(samps_read), None))
    return samps_read.value
def _read_ctr_time(
        task_handle, high_time, low_time, num_samps_per_chan, timeout,
        interleaved=FillMode.GROUP_BY_CHANNEL):
    """Read counter high-time / low-time sample pairs into the two arrays.

    Returns the number of samples read per channel.
    """
    cfunc = lib_importer.windll.DAQmxReadCtrTime
    # Configure the ctypes prototype once, double-checked under the lock.
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes.c_int, ctypes.c_double,
                    ctypes.c_int,
                    wrapped_ndpointer(dtype=numpy.float64, flags=('C', 'W')),
                    wrapped_ndpointer(dtype=numpy.float64, flags=('C', 'W')),
                    ctypes.c_uint, ctypes.POINTER(ctypes.c_int),
                    ctypes.POINTER(c_bool32)]
    samps_read = ctypes.c_int()
    check_for_error(cfunc(
        task_handle, num_samps_per_chan, timeout, interleaved.value,
        high_time, low_time, numpy.prod(high_time.shape),
        ctypes.byref(samps_read), None))
    return samps_read.value
def _read_ctr_ticks(
        task_handle, high_tick, low_tick, num_samps_per_chan, timeout,
        interleaved=FillMode.GROUP_BY_CHANNEL):
    """Read counter high-tick / low-tick sample pairs into the two arrays.

    Returns the number of samples read per channel.
    """
    cfunc = lib_importer.windll.DAQmxReadCtrTicks
    # Configure the ctypes prototype once, double-checked under the lock.
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes.c_int, ctypes.c_double,
                    ctypes.c_int,
                    wrapped_ndpointer(dtype=numpy.uint32, flags=('C', 'W')),
                    wrapped_ndpointer(dtype=numpy.uint32, flags=('C', 'W')),
                    ctypes.c_uint, ctypes.POINTER(ctypes.c_int),
                    ctypes.POINTER(c_bool32)]
    samps_read = ctypes.c_int()
    check_for_error(cfunc(
        task_handle, num_samps_per_chan, timeout, interleaved.value,
        high_tick, low_tick, numpy.prod(high_tick.shape),
        ctypes.byref(samps_read), None))
    return samps_read.value
def _read_ctr_freq_scalar(task_handle, timeout):
    """Read one frequency / duty-cycle measurement; returns a CtrFreq."""
    cfunc = lib_importer.windll.DAQmxReadCtrFreqScalar
    # Configure the ctypes prototype lazily, once, under the lock.
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes.c_double,
                    ctypes.POINTER(ctypes.c_double),
                    ctypes.POINTER(ctypes.c_double),
                    ctypes.POINTER(c_bool32)]
    freq_out = ctypes.c_double()
    duty_out = ctypes.c_double()
    check_for_error(cfunc(
        task_handle, timeout, ctypes.byref(freq_out),
        ctypes.byref(duty_out), None))
    return CtrFreq(freq_out.value, duty_out.value)
def _read_ctr_time_scalar(task_handle, timeout):
    """Read one high-time / low-time measurement; returns a CtrTime."""
    cfunc = lib_importer.windll.DAQmxReadCtrTimeScalar
    # Configure the ctypes prototype lazily, once, under the lock.
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes.c_double,
                    ctypes.POINTER(ctypes.c_double),
                    ctypes.POINTER(ctypes.c_double),
                    ctypes.POINTER(c_bool32)]
    high_out = ctypes.c_double()
    low_out = ctypes.c_double()
    check_for_error(cfunc(
        task_handle, timeout, ctypes.byref(high_out),
        ctypes.byref(low_out), None))
    return CtrTime(high_out.value, low_out.value)
def _read_ctr_ticks_scalar(task_handle, timeout):
    """Read one high-tick / low-tick measurement; returns a CtrTick."""
    cfunc = lib_importer.windll.DAQmxReadCtrTicksScalar
    # Configure the ctypes prototype lazily, once, under the lock.
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes.c_double,
                    ctypes.POINTER(ctypes.c_uint),
                    ctypes.POINTER(ctypes.c_uint),
                    ctypes.POINTER(c_bool32)]
    high_out = ctypes.c_uint()
    low_out = ctypes.c_uint()
    check_for_error(cfunc(
        task_handle, timeout, ctypes.byref(high_out),
        ctypes.byref(low_out), None))
    return CtrTick(high_out.value, low_out.value)
def _read_raw(task_handle, read_array, num_samps_per_chan, timeout):
    """Read raw, untyped samples into *read_array*.

    Returns ``(samples_read, bytes_per_sample)``.
    """
    cfunc = lib_importer.windll.DAQmxReadRaw
    # Configure the ctypes prototype once, double-checked under the lock.
    # The ndpointer dtype mirrors whatever buffer the caller supplied.
    if cfunc.argtypes is None:
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes.c_int, ctypes.c_double,
                    wrapped_ndpointer(dtype=read_array.dtype, flags=('C', 'W')),
                    ctypes.c_uint, ctypes.POINTER(ctypes.c_int),
                    ctypes.POINTER(ctypes.c_int), ctypes.POINTER(c_bool32)]
    samps_read = ctypes.c_int()
    bytes_per_samp = ctypes.c_int()
    check_for_error(cfunc(
        task_handle, num_samps_per_chan, timeout, read_array,
        read_array.nbytes, ctypes.byref(samps_read),
        ctypes.byref(bytes_per_samp), None))
    return samps_read.value, bytes_per_samp.value
| 35.582624
| 80
| 0.641835
| 2,626
| 20,887
| 4.761615
| 0.048743
| 0.072217
| 0.079655
| 0.0627
| 0.883077
| 0.862364
| 0.854367
| 0.830934
| 0.830934
| 0.827655
| 0
| 0.009805
| 0.272466
| 20,887
| 586
| 81
| 35.643345
| 0.813043
| 0
| 0
| 0.774403
| 0
| 0
| 0.004931
| 0.001245
| 0
| 0
| 0
| 0
| 0
| 1
| 0.052061
| false
| 0
| 0.125813
| 0
| 0.229935
| 0.002169
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7ffad0ae694f2356d7693551296edf0d0330bcab
| 10,839
|
py
|
Python
|
test/myproject/blog/s3filter.py
|
CloudKloud/CloudKloud
|
2f00ff43ca239ef7b06a511037b910c537b40893
|
[
"MIT"
] | 1
|
2021-01-23T11:21:30.000Z
|
2021-01-23T11:21:30.000Z
|
test/myproject/blog/s3filter.py
|
CloudKloud/CloudKloud
|
2f00ff43ca239ef7b06a511037b910c537b40893
|
[
"MIT"
] | null | null | null |
test/myproject/blog/s3filter.py
|
CloudKloud/CloudKloud
|
2f00ff43ca239ef7b06a511037b910c537b40893
|
[
"MIT"
] | 2
|
2020-12-18T17:56:58.000Z
|
2020-12-23T05:20:34.000Z
|
import boto3
import time
import json
import datetime
from regist.models import accessKeyIDPW
# AWS credentials are loaded from the first accessKeyIDPW row at import time.
db = accessKeyIDPW.objects.all()
if db:
    AWS_ACCESS_KEY_ID = db[0].accesskeyid
    AWS_SECRET_ACCESS_KEY = db[0].secretaccesskey
    AWS_DEFAULT_REGION = db[0].awsconfigregion
# NOTE(review): when the table is empty the AWS_* names above are never bound,
# so the client constructors below raise NameError at import time — confirm
# this module is only imported after credentials have been registered.
logs = boto3.client('logs',
                    aws_access_key_id=AWS_ACCESS_KEY_ID,
                    aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
                    region_name=AWS_DEFAULT_REGION)
s3 = boto3.client('s3',aws_access_key_id=AWS_ACCESS_KEY_ID,
                  aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
                  region_name=AWS_DEFAULT_REGION)
# Bucket object-listing events (refreshed ~every 10 s).
def List_Objects():
    """Collect S3 ListObjects CloudTrail events and publish them to key S3/0."""
    rows = []
    token = ''
    while True:
        kwargs = {
            'logGroupName': 'all_region_cloudtrail',
            'filterPattern': '{$.eventName="ListObjects"}',
        }
        if token:
            kwargs['nextToken'] = token
        log = logs.filter_log_events(**kwargs)
        for event in log['events']:
            detail = json.loads(event.get('message'))
            if 's3.amazonaws.com' in detail['eventSource']:
                stamp = datetime.datetime.fromtimestamp(
                    event['timestamp']/1000).strftime('%Y-%m-%d %H:%M:%S')
                rows.append({"id": len(rows), "timestamp": stamp,
                             "message": event['message']})
        token = log.get("nextToken")
        if not token:
            break
    payload = json.dumps(
        {"total": len(rows), "totalNotFiltered": len(rows), "rows": rows})
    return s3.put_object(Body=payload, Bucket='threatitem', Key='S3/0')
# S3 data-creation events (refreshed ~every 2 min).
def S3_Create_Data():
    """Collect S3 PutObject CloudTrail events and publish them to key S3/1."""
    rows = []
    token = ''
    while True:
        kwargs = {
            'logGroupName': 'all_region_cloudtrail',
            'filterPattern': '{$.eventName="PutObject"}',
        }
        if token:
            kwargs['nextToken'] = token
        log = logs.filter_log_events(**kwargs)
        for event in log['events']:
            detail = json.loads(event.get('message'))
            if 's3.amazonaws.com' in detail['eventSource']:
                stamp = datetime.datetime.fromtimestamp(
                    event['timestamp']/1000).strftime('%Y-%m-%d %H:%M:%S')
                rows.append({"id": len(rows), "timestamp": stamp,
                             "message": event['message']})
        token = log.get("nextToken")
        if not token:
            break
    payload = json.dumps(
        {"total": len(rows), "totalNotFiltered": len(rows), "rows": rows})
    return s3.put_object(Body=payload, Bucket='threatitem', Key='S3/1')
# S3 data-deletion events (within ~10 s).
def S3_Delete_Data():
    """Collect S3 DeleteObjects CloudTrail events and publish them to key S3/2."""
    rows = []
    token = ''
    while True:
        kwargs = {
            'logGroupName': 'all_region_cloudtrail',
            'filterPattern': '{$.eventName="DeleteObjects"}',
        }
        if token:
            kwargs['nextToken'] = token
        log = logs.filter_log_events(**kwargs)
        for event in log['events']:
            detail = json.loads(event.get('message'))
            if 's3.amazonaws.com' in detail['eventSource']:
                stamp = datetime.datetime.fromtimestamp(
                    event['timestamp']/1000).strftime('%Y-%m-%d %H:%M:%S')
                rows.append({"id": len(rows), "timestamp": stamp,
                             "message": event['message']})
        token = log.get("nextToken")
        if not token:
            break
    payload = json.dumps(
        {"total": len(rows), "totalNotFiltered": len(rows), "rows": rows})
    return s3.put_object(Body=payload, Bucket='threatitem', Key='S3/2')
# Abnormal IAM entity S3 API calls: ListBuckets that ended in AccessDenied.
def Call_API_Abnormal_Object():
    """Collect denied S3 ListBuckets CloudTrail events; publish to key S3/3."""
    rows = []
    token = ''
    while True:
        kwargs = {
            'logGroupName': 'all_region_cloudtrail',
            'filterPattern': '{$.eventName="ListBuckets"}',
        }
        if token:
            kwargs['nextToken'] = token
        log = logs.filter_log_events(**kwargs)
        for event in log['events']:
            detail = json.loads(event.get('message'))
            # Keep only S3 events that were rejected with AccessDenied.
            if ('s3.amazonaws.com' in detail['eventSource']
                    and '"errorCode":"AccessDenied"' in event['message']):
                stamp = datetime.datetime.fromtimestamp(
                    event['timestamp']/1000).strftime('%Y-%m-%d %H:%M:%S')
                rows.append({"id": len(rows), "timestamp": stamp,
                             "message": event['message']})
        token = log.get("nextToken")
        if not token:
            break
    payload = json.dumps(
        {"total": len(rows), "totalNotFiltered": len(rows), "rows": rows})
    return s3.put_object(Body=payload, Bucket='threatitem', Key='S3/3')
# Server access-logging disabled checks (GetBucketPublicAccessBlock events).
def Access_Logging_Disabled():
    """Collect GetBucketPublicAccessBlock CloudTrail events; publish to key S3/4."""
    rows = []
    token = ''
    while True:
        kwargs = {
            'logGroupName': 'all_region_cloudtrail',
            'filterPattern': '{$.eventName="GetBucketPublicAccessBlock"}',
        }
        if token:
            kwargs['nextToken'] = token
        log = logs.filter_log_events(**kwargs)
        for event in log['events']:
            detail = json.loads(event.get('message'))
            if 's3.amazonaws.com' in detail['eventSource']:
                stamp = datetime.datetime.fromtimestamp(
                    event['timestamp']/1000).strftime('%Y-%m-%d %H:%M:%S')
                rows.append({"id": len(rows), "timestamp": stamp,
                             "message": event['message']})
        token = log.get("nextToken")
        if not token:
            break
    payload = json.dumps(
        {"total": len(rows), "totalNotFiltered": len(rows), "rows": rows})
    return s3.put_object(Body=payload, Bucket='threatitem', Key='S3/4')
# Bucket or object permission changes (PutBucketAcl events).
def Modify_Policy_BucketObject():
    """Collect S3 PutBucketAcl CloudTrail events and publish them to key S3/5."""
    rows = []
    token = ''
    while True:
        kwargs = {
            'logGroupName': 'all_region_cloudtrail',
            'filterPattern': '{$.eventName="PutBucketAcl"}',
        }
        if token:
            kwargs['nextToken'] = token
        log = logs.filter_log_events(**kwargs)
        for event in log['events']:
            detail = json.loads(event.get('message'))
            if 's3.amazonaws.com' in detail['eventSource']:
                stamp = datetime.datetime.fromtimestamp(
                    event['timestamp']/1000).strftime('%Y-%m-%d %H:%M:%S')
                rows.append({"id": len(rows), "timestamp": stamp,
                             "message": event['message']})
        token = log.get("nextToken")
        if not token:
            break
    payload = json.dumps(
        {"total": len(rows), "totalNotFiltered": len(rows), "rows": rows})
    return s3.put_object(Body=payload, Bucket='threatitem', Key='S3/5')
# Bucket policy changes (PutBucketPolicy events).
def Modify_Bucket_Policy():
    """Collect S3 PutBucketPolicy CloudTrail events; publish them to key S3/6."""
    rows = []
    token = ''
    while True:
        kwargs = {
            'logGroupName': 'all_region_cloudtrail',
            'filterPattern': '{$.eventName="PutBucketPolicy"}',
        }
        if token:
            kwargs['nextToken'] = token
        log = logs.filter_log_events(**kwargs)
        for event in log['events']:
            detail = json.loads(event.get('message'))
            if 's3.amazonaws.com' in detail['eventSource']:
                stamp = datetime.datetime.fromtimestamp(
                    event['timestamp']/1000).strftime('%Y-%m-%d %H:%M:%S')
                rows.append({"id": len(rows), "timestamp": stamp,
                             "message": event['message']})
        token = log.get("nextToken")
        if not token:
            break
    payload = json.dumps(
        {"total": len(rows), "totalNotFiltered": len(rows), "rows": rows})
    return s3.put_object(Body=payload, Bucket='threatitem', Key='S3/6')
# Access from specific Linux distributions (kali / parrot / pentoo user agents).
def Access_System():
    """Collect S3-related CloudTrail events from pentest-distro user agents;
    publish them to key S3/7."""
    rows = []
    token = ''
    while True:
        kwargs = {
            'logGroupName': 'all_region_cloudtrail',
            'filterPattern': '{$.userAgent="-kali" || $.userAgent="parrot - WebIdentityUser" || $.userAgent="pentoo"}',
        }
        if token:
            kwargs['nextToken'] = token
        log = logs.filter_log_events(**kwargs)
        for event in log['events']:
            # Unlike the other collectors, this one matches on the raw
            # message text rather than the parsed eventSource field.
            if 's3.amazonaws.com' in event['message']:
                stamp = datetime.datetime.fromtimestamp(
                    event['timestamp']/1000).strftime('%Y-%m-%d %H:%M:%S')
                rows.append({"id": len(rows), "timestamp": stamp,
                             "message": event['message']})
        token = log.get("nextToken")
        if not token:
            break
    payload = json.dumps(
        {"total": len(rows), "totalNotFiltered": len(rows), "rows": rows})
    return s3.put_object(Body=payload, Bucket='threatitem', Key='S3/7')
| 34.300633
| 163
| 0.520712
| 1,086
| 10,839
| 5.039595
| 0.127072
| 0.052622
| 0.035081
| 0.046775
| 0.88087
| 0.875206
| 0.875206
| 0.875206
| 0.875206
| 0.875206
| 0
| 0.013893
| 0.349202
| 10,839
| 315
| 164
| 34.409524
| 0.761979
| 0.015684
| 0
| 0.762264
| 0
| 0
| 0.200769
| 0.077118
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030189
| false
| 0
| 0.018868
| 0
| 0.079245
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7ffbdeb7a1162ccdf461f5ef56d99ef0dbb10271
| 77,796
|
py
|
Python
|
tests/test_mask_rules.py
|
j-h-m/Media-Journaling-Tool
|
4ab6961e2768dc002c9bbad182f83188631f01bd
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_mask_rules.py
|
j-h-m/Media-Journaling-Tool
|
4ab6961e2768dc002c9bbad182f83188631f01bd
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_mask_rules.py
|
j-h-m/Media-Journaling-Tool
|
4ab6961e2768dc002c9bbad182f83188631f01bd
|
[
"BSD-3-Clause"
] | null | null | null |
import networkx as nx
from maskgen.mask_rules import *
from mock import *
from test_support import TestSupport
class ImageGraphB:
    """Thin read-only adapter over a wrapped directed graph."""

    def __init__(self, G):
        """
        :param G:
        @type G: nx.DiGraph
        """
        self.G = G

    def successors(self, node):
        """Delegate successor lookup to the wrapped graph."""
        return self.G.successors(node)

    def get_node(self, node):
        """Return the attribute dict stored for *node*."""
        return self.G.node[node]

    def get_edge(self, start, end):
        """Return the edge data for start -> end, or None when absent."""
        if self.G.has_edge(start, end):
            return self.G[start][end]
        return None

    def get_nodes(self):
        """Return all node identifiers of the wrapped graph."""
        return self.G.nodes()
class TestMaskRules(TestSupport):
def test_add_audio(self):
edge = {u'maskname': u'Rotate_mask.png',
u'inputmaskname': None,
u'shape change': u'(0, 0)',
u'empty mask': 'no',
u'arguments': {'voice': 'no',
'add type': 'replace',
'filter type': 'Other',
'synchronization': 'none',
'Start Time': '00:00:00',
'Stream': 'all',
'Direct from PC': 'no'
},
u'op': u'AddAudioSample'}
mask = video_tools.create_segment(
starttime=0,
startframe=1,
endtime=4,
endframe=176400,
frames=176399,
rate=44100,
error=0,
type='audio')
cm = CompositeImage(source='a', target='b', media_type='audio', mask=[mask])
graph = Mock()
buildState = BuildState(edge,
np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
np.random.randint(0, 255, (3884, 2888, 3), dtype=np.uint8),
np.zeros((3984, 2988), dtype=np.uint8),
(3984, 2988),
(3884, 2888),
directory='.',
compositeMask=cm,
pred_edges=None,
graph=graph)
with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_composite:
mock_composite.compositeMask = cm
mock_composite.edge = edge
mock_composite.arguments.return_value = edge['arguments']
mock_composite.getMasksFromEdge.return_value = [video_tools.create_segment(**{
'starttime': .2267,
'startframe': 10000,
'endtime': .4535,
'endframe': 20000,
'frames': 10000,
'type': 'audio',
'rate': 44100
})]
result = add_audio(mock_composite)
self.assertEqual(2, len(result.videomasks))
self.assertEqual(9999,
video_tools.get_end_frame_from_segment(result.videomasks[0]))
self.assertEqual(20001,
video_tools.get_start_frame_from_segment(result.videomasks[1]))
def test_copy_add_audio(self):
edge = {u'maskname': u'Rotate_mask.png',
u'inputmaskname': None,
u'shape change': u'(0, 0)',
u'empty mask': 'no',
u'arguments': {'voice': 'no',
'add type': 'replace',
'filter type': 'Other',
'synchronization': 'none',
'Copy Start Time': '00:00:00',
'Copy End Time': '00:01:00',
'Insertion Time': '00:03:00',
'Stream': 'all',
'Direct from PC': 'no'
},
u'op': u'AudioCopyAdd'}
mask = video_tools.create_segment(
starttime=3000,
startframe=132300,
endtime=4000,
endframe=176400,
frames=44100,
rate=44100,
error=0,
type='audio')
cm = CompositeImage(source='a', target='b', media_type='audio', mask=[mask])
graph = Mock()
buildState = BuildState(edge,
np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
np.random.randint(0, 255, (3884, 2888, 3), dtype=np.uint8),
np.zeros((3984, 2988), dtype=np.uint8),
(3984, 2988),
(3884, 2888),
directory='.',
compositeMask=cm,
pred_edges=None,
graph=graph)
with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_composite:
mock_composite.compositeMask = cm
mock_composite.edge = edge
mock_composite.arguments.return_value = edge['arguments']
mock_composite.getMasksFromEdge.return_value = [video_tools.create_segment(**{
'starttime': 3500,
'startframe': 154350,
'endtime': 4500,
'endframe': 198450,
'frames': 44100,
'type': 'audio',
'rate': 44100
})]
result = copy_add_audio(mock_composite)
self.assertEqual(1, len(result.videomasks))
self.assertEqual(154349,
video_tools.get_end_frame_from_segment(result.videomasks[0]))
self.assertEqual(132300,
video_tools.get_start_frame_from_segment(result.videomasks[0]))
edge['arguments']['add type'] = 'insert'
with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_composite:
mock_composite.compositeMask = cm
mock_composite.edge = edge
mock_composite.arguments.return_value = edge['arguments']
mock_composite.getMasksFromEdge.return_value = [video_tools.create_segment(**{
'starttime': 3500,
'startframe': 154350,
'endtime': 4500,
'endframe': 198450,
'frames': 44101,
'type': 'audio',
'rate': 44100
})]
result = copy_add_audio(mock_composite)
self.assertEqual(2, len(result.videomasks))
self.assertEqual(154349,
video_tools.get_end_frame_from_segment(result.videomasks[0]))
self.assertEqual(198451,
video_tools.get_start_frame_from_segment(result.videomasks[1]))
self.assertEqual(22050,
video_tools.get_frames_from_segment(result.videomasks[0]))
self.assertEqual(22051,
video_tools.get_frames_from_segment(result.videomasks[1]))
def test_replace_audio(self):
edge = {u'maskname': u'Rotate_mask.png',
u'inputmaskname': None,
u'shape change': u'(0, 0)',
'empty mask': 'no',
u'arguments': {'voice': 'no',
'filter type': 'Other',
'Stream': 'all',
},
u'op': u'ReplaceAudioSample'}
mask = video_tools.create_segment(
starttime=0,
startframe=1,
endtime=4,
endframe=176400,
frames=176399,
rate=44100,
error=0,
type='audio')
cm = CompositeImage(source='a', target='b', media_type='audio', mask=[mask])
graph = Mock()
buildState = BuildState(edge,
np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
np.random.randint(0, 255, (3884, 2888, 3), dtype=np.uint8),
np.zeros((3984, 2988), dtype=np.uint8),
(3984, 2988),
(3884, 2888),
directory='.',
compositeMask=cm,
pred_edges=None,
graph=graph)
with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_composite:
mock_composite.compositeMask = cm
mock_composite.edge = edge
mock_composite.arguments.return_value = edge['arguments']
mock_composite.getMasksFromEdge.return_value = [video_tools.create_segment(**{
'starttime': 0,
'startframe': 1,
'endtime': 4,
'endframe': 176400,
'frames': 176399,
'type': 'audio',
'rate': 44800
})]
result = replace_audio(mock_composite)
self.assertEqual(0, len(result.videomasks))
    def test_audio_selection(self):
        """audio_selection on a composite holding one video and one audio segment
        keeps only the video segment."""
        # TODO: Placeholder for final implementation
        edge = {u'maskname': u'output_mask.png',
                u'inputmaskname': None,
                'empty mask': 'no',
                u'op': u'OutputAudioPCM'}
        # Two segments spanning the same frames/time, differing only in media type.
        mask = video_tools.create_segment(
            starttime=1400,
            startframe=15,
            endtime=2400,
            endframe=25,
            frames=11,
            rate=10,
            error=0,
            type='video')
        mask1 = video_tools.create_segment(
            starttime=1400,
            startframe=15,
            endtime=2400,
            endframe=25,
            frames=11,
            rate=10,
            error=0,
            type='audio')
        cm = CompositeImage('a', 'b', 'video', [mask, mask1])
        graph = Mock()
        graph.get_node = Mock(return_value={'shape': '(3984, 2988)'})
        buildState = BuildState(edge,
                                np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
                                np.random.randint(0, 255, (3784, 2788, 3), dtype=np.uint8),
                                np.zeros((3984, 2988), dtype=np.uint8),
                                (3984, 2988),
                                (3784, 2788),
                                directory='.',
                                donorMask=None,
                                compositeMask=cm,
                                pred_edges=None,
                                graph=graph)
        result = audio_selection(buildState)
        # Only the video-typed segment survives.
        self.assertEqual(1, len(result.videomasks))
        self.assertEqual('video', video_tools.get_type_of_segment(result.videomasks[0]))
    def test_output(self):
        """output_video_change with a shape change: the (stubbed) warpMask result is
        passed through unchanged to the output videomasks."""
        edge = {u'maskname': u'output_mask.png',
                u'inputmaskname': None,
                u'shape change': u'(-100, -100)',
                'empty mask': 'no',
                u'op': u'OutputMOV'}
        mask = video_tools.create_segment(
            starttime=1400,
            startframe=15,
            endtime=2400,
            endframe=25,
            frames=11,
            rate=10,
            error=0,
            type='video')
        cm = CompositeImage('a', 'b', 'video', [mask])
        graph = Mock()
        graph.get_node = Mock(return_value={'shape': '(3984, 2988)'})
        buildState = BuildState(edge,
                                np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
                                np.random.randint(0, 255, (3784, 2788, 3), dtype=np.uint8),
                                np.zeros((3984, 2988), dtype=np.uint8),
                                (3984, 2988),
                                (3784, 2788),
                                directory='.',
                                donorMask=None,
                                compositeMask=cm,
                                pred_edges=None,
                                graph=graph)
        with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_composite:
            # Delegate the real shape-change helpers to the concrete BuildState.
            mock_composite.shapeChange = buildState.shapeChange
            mock_composite.getVideoMetaExtractor = buildState.getVideoMetaExtractor
            # warpMask is stubbed to return the same segment it was given, so
            # the output should echo the input mask verbatim.
            mock_composite.warpMask.return_value = CompositeImage('a', 'b', 'video', [video_tools.create_segment(**{
                'starttime': 1400,
                'startframe': 15,
                'endtime': 2400,
                'endframe': 25,
                'frames': 11,
                'type': 'video',
                'rate': 10
            })])
            mock_composite.compositeMask = cm
            mock_composite.isComposite = True
            mock_composite.getMasksFromEdge.return_value = [video_tools.create_segment(**{
                'starttime': 1400,
                'startframe': 15,
                'endtime': 2400,
                'endframe': 25,
                'frames': 11,
                'type': 'video',
                'rate': 10
            })]
            result = output_video_change(mock_composite)
            self.assertEqual(1, len(result.videomasks))
            self.assertEqual(15, video_tools.get_start_frame_from_segment(result.videomasks[0]))
            self.assertEqual(25, result.videomasks[0]['endframe'])
            self.assertEqual(11, result.videomasks[0]['frames'])
            self.assertEqual(1400, result.videomasks[0]['starttime'])
            self.assertEqual(2400.0, result.videomasks[0]['endtime'])
def test_crop_resize_transform(self):
edge = {u'maskname': u'crop_resize_mask.png',
u'inputmaskname': None,
u'location': u'(50, 50)',
'empty mask': 'no',
u'arguments': {'crop width': 2500, 'crop height': 3500},
u'op': u'TransformCropResize'}
img = np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8)
img_crop = img[10:3500, 10:2500, :]
img_crop_resize = cv2.resize(img_crop, (img.shape[1], img.shape[0]))
composite_mask = np.zeros((3984, 2988), dtype=np.uint8)
composite_mask[0:100, 0:100] = 1
buildState = BuildState(edge,
img,
img_crop_resize,
np.zeros((3984, 2988), dtype=np.uint8),
(3984, 2988),
(3984, 2988),
directory='.',
compositeMask=CompositeImage('a', 'b', 'image', composite_mask),
pred_edges=None,
graph=None)
result = crop_resize_transform(buildState)
self.assertEqual((3984, 2988), result.mask.shape)
self.assertEqual(1, result.mask[0, 0])
self.assertEqual(1, result.mask[49, 49])
self.assertEqual(0, result.mask[61, 61])
buildState = BuildState(edge,
img,
img_crop_resize,
np.zeros((3984, 2988), dtype=np.uint8),
(3984, 2988),
(3884, 2888),
directory='.',
donorMask=CompositeImage('a', 'b', 'image', composite_mask),
pred_edges=None,
graph=None)
result = crop_resize_transform(buildState)
self.assertEqual(0, result.mask[0, 0])
self.assertEqual(0, result.mask[10, 10])
self.assertEqual(1, result.mask[51, 51])
def test_recapture_transform(self):
edge = {u'maskname': u'Rotate_mask.png',
u'inputmaskname': None,
u'shape change': u'(0, 0)',
'empty mask': 'no',
u'arguments': {u'Position Mapping': '(86, 0, 2860, 3973):(0, 0, 7968, 5313):90'},
u'transform matrix': {u'c': 3,
u'r': 3,
u'r0': [0.8266647515769302, 0.07178941510501777, 159.50098419871705],
u'r1': [-0.06021837537671073, 0.9344977768387763, 137.85479973696164],
u'r2': [-3.946051215265123e-05, 1.8621034727368588e-05, 1.0]},
u'op': u'Recapture'}
buildState = BuildState(edge,
self.locateFile('images/PostRotate.png'),
self.locateFile('images/PostRotate.png'), # does not matter
openImageFile(self.locateFile('images/Recapture_mask.png'), isMask=True).image_array,
(3984, 2988),
(5320, 7968),
directory='.',
compositeMask=CompositeImage('a', 'b', 'image',
openImageFile(self.locateFile('images/Rotate_mask.png'),
isMask=True).image_array),
pred_edges=None,
graph=None)
result = recapture_transform(buildState)
self.assertEquals((5320, 7968), result.mask.shape)
buildState = BuildState(edge,
self.locateFile('images/PostRotate.png'),
self.locateFile('images/PostRotate.png'), # does not matter
openImageFile(self.locateFile('images/Recapture_mask.png'), isMask=True).image_array,
(3984, 2988),
(5320, 7968),
directory='.',
donorMask=result,
pred_edges=None,
graph=None)
result = recapture_transform(buildState)
self.assertEquals((3984, 2988), result.mask.shape)
edge = {u'maskname': u'Rotate_mask.png',
u'inputmaskname': None,
u'shape change': u'(0, 0)',
'empty mask': 'no',
u'arguments': {},
u'transform matrix': {u'c': 3,
u'r': 3,
u'r0': [0.8266647515769302, 0.07178941510501777, 159.50098419871705],
u'r1': [-0.06021837537671073, 0.9344977768387763, 137.85479973696164],
u'r2': [-3.946051215265123e-05, 1.8621034727368588e-05, 1.0]},
u'op': u'Recapture'}
buildState = BuildState(edge,
self.locateFile('images/PostRotate.png'),
self.locateFile('images/PostRotate.png'), # does not matter
openImageFile(self.locateFile('images/Recapture_mask.png'), isMask=True).image_array,
(3984, 2988),
(5320, 7968),
directory='.',
compositeMask=CompositeImage('a', 'b', 'image',
openImageFile(self.locateFile('images/Rotate_mask.png'),
isMask=True).image_array),
pred_edges=None,
graph=None)
result = recapture_transform(buildState)
self.assertEquals((5320, 7968), result.mask.shape)
buildState = BuildState(edge,
self.locateFile('images/PostRotate.png'),
self.locateFile('images/PostRotate.png'), # does not matter
openImageFile(self.locateFile('images/Recapture_mask.png'), isMask=True).image_array,
(3984, 2988),
(5320, 7968),
directory='.',
donorMask=result,
pred_edges=None,
graph=None)
result = recapture_transform(buildState)
self.assertEquals((3984, 2988), result.mask.shape)
    def test_rotate_transform(self):
        """rotate_transform: a non-right-angle rotation (358 deg) keeps the shape,
        while a 90-degree rotation swaps the dimensions; each is round-tripped
        through the donor direction."""
        edge = {u'maskname': u'Rotate_mask.png',
                u'inputmaskname': None,
                u'shape change': u'(0, 0)',
                'empty mask': 'no',
                u'arguments': {u'rotation': 358},
                u'transform matrix': {u'c': 3,
                                      u'r': 3,
                                      u'r0': [0.8266647515769302, 0.07178941510501777, 159.50098419871705],
                                      u'r1': [-0.06021837537671073, 0.9344977768387763, 137.85479973696164],
                                      u'r2': [-3.946051215265123e-05, 1.8621034727368588e-05, 1.0]},
                u'op': u'TransformRotate'}
        buildState = BuildState(edge,
                                self.locateFile('images/PreRotate.png'),
                                self.locateFile('images/PostRotate.png'),
                                openImageFile(self.locateFile('images/Rotate_mask.png'), isMask=True).image_array,
                                (3984, 2988),
                                (3984, 2988),
                                directory='.',
                                compositeMask=CompositeImage('a', 'b', 'image',
                                                             openImageFile(self.locateFile('images/Rotate_mask.png'),
                                                                           isMask=True).image_array),
                                pred_edges=None,
                                graph=None)
        result = rotate_transform(buildState)
        # 358-degree rotation with no shape change: same dimensions.
        self.assertEqual((3984, 2988), result.mask.shape)
        buildState = BuildState(edge,
                                self.locateFile('images/PreRotate.png'),
                                self.locateFile('images/PostRotate.png'),
                                openImageFile(self.locateFile('images/Rotate_mask.png'), isMask=True).image_array,
                                (3984, 2988),
                                (3984, 2988),
                                directory='.',
                                donorMask=result,
                                pred_edges=None,
                                graph=None)
        result = rotate_transform(buildState)
        self.assertEqual((3984, 2988), result.mask.shape)
        # 90-degree rotation with a dimension-swapping shape change (no matrix).
        edge = {u'maskname': u'Rotate_mask.png',
                u'inputmaskname': None,
                u'shape change': u'(-996, 996)',
                'empty mask': 'no',
                u'arguments': {u'rotation': 90},
                u'op': u'TransformRotate'}
        buildState = BuildState(edge,
                                self.locateFile('images/PreRotate.png'),
                                self.locateFile('images/PostRotate.png'),
                                openImageFile(self.locateFile('images/Rotate_mask.png'), isMask=True).image_array,
                                (3984, 2988),
                                (2988, 3984),
                                directory='.',
                                compositeMask=CompositeImage('a', 'b', 'image',
                                                             openImageFile(self.locateFile('images/Rotate_mask.png'),
                                                                           isMask=True).image_array),
                                pred_edges=None,
                                graph=None)
        result = rotate_transform(buildState)
        # Dimensions are swapped by the 90-degree rotation.
        self.assertEqual((2988, 3984), result.mask.shape)
        buildState = BuildState(edge,
                                self.locateFile('images/PreRotate.png'),
                                self.locateFile('images/PostRotate.png'),
                                openImageFile(self.locateFile('images/Rotate_mask.png'), isMask=True).image_array,
                                (3984, 2988),
                                (3984, 2988),
                                directory='.',
                                donorMask=result,
                                pred_edges=None,
                                graph=None)
        result = rotate_transform(buildState)
        self.assertEqual((3984, 2988), result.mask.shape)
def test_resize_transform(self):
edge = {u'maskname': u'Rotate_mask.png',
u'inputmaskname': None,
u'shape change': u'(-100, -100)',
'empty mask': 'no',
u'arguments': {'interpolation': 'other'},
u'op': u'TransformResize'}
buildState = BuildState(edge,
np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
np.random.randint(0, 255, (3884, 2888, 3), dtype=np.uint8),
np.zeros((3984, 2988), dtype=np.uint8),
(3984, 2988),
(3884, 2888),
directory='.',
compositeMask=CompositeImage('a', 'b', 'image', np.ones((3984, 2988), dtype=np.uint8)),
pred_edges=None,
graph=None)
result = resize_transform(buildState)
self.assertEqual((3884, 2888), result.mask.shape)
self.assertEqual(1, result.mask[11, 11])
edge = {u'maskname': u'Rotate_mask.png',
u'inputmaskname': None,
u'shape change': u'(-100, -100)',
'empty mask': 'no',
u'arguments': {'location': '10,10',
'interpolation': 'none',
u'transform matrix': {u'c': 3,
u'r': 3,
u'r0': [1, 0,
2],
u'r1': [0, 1, 12],
u'r2': [0, 0, 1.0]}
},
u'op': u'TransformResize'}
mask = np.zeros((3984, 2988), dtype=np.uint8)
mask[200:300, 200:300] = 1
buildState = BuildState(edge,
np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
np.random.randint(0, 255, (3884, 2888, 3), dtype=np.uint8),
mask,
(3984, 2988),
(3884, 2888),
directory='.',
compositeMask=CompositeImage('a', 'b', 'image', mask),
pred_edges=None,
graph=None)
result = resize_transform(buildState).mask
self.assertEqual((3884, 2888), result.shape)
self.assertEqual(0, result[201, 201])
self.assertEqual(1, result[212, 212])
buildState = BuildState(edge,
np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
np.random.randint(0, 255, (3884, 2888, 3), dtype=np.uint8),
np.zeros((3984, 2988), dtype=np.uint8) * 255,
(3984, 2988),
(3884, 2888),
directory='.',
donorMask=CompositeImage('a', 'b', 'image', result * 255),
pred_edges=None,
graph=None)
result = resize_transform(buildState).mask
self.assertEqual((3984, 2988), result.shape)
ImageWrapper(result).save('foo.png')
self.assertEqual(255, result[205, 206])
    def test_cas_transform(self):
        """seam_transform (content-aware scale) applies the edge's transform matrix
        to a composite mask; the reverse direction is then checked via the donor path."""
        edge = {u'maskname': u'Rotate_mask.png',
                u'inputmaskname': None,
                'empty mask': 'no',
                u'arguments': {
                    u'transform matrix': {u'c': 3,
                                          u'r': 3,
                                          u'r0': [0.7, -0.7, 50],
                                          u'r1': [0.7, 0.7, 50],
                                          u'r2': [0, 0, 1.0]}
                },
                u'op': u'TransformContentAwareScale'}
        mask = np.zeros((3984, 2988), dtype=np.uint8)
        cm = np.zeros((3984, 2988), dtype=np.uint8)
        cm[200:300, 200:300] = 1
        buildState = BuildState(edge,
                                np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
                                np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
                                mask,
                                (3984, 2988),
                                (3984, 2988),
                                directory='.',
                                compositeMask=CompositeImage('a', 'b', 'image', cm),
                                pred_edges=None,
                                graph=None)
        result = seam_transform(buildState).mask
        self.assertEqual((3984, 2988), result.shape)
        # The matrix moves the 200:300 block away from its original corner.
        self.assertEqual(0, result[201, 201])
        self.assertEqual(1, result[330, 50])
        buildState = BuildState(edge,
                                np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
                                np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
                                np.zeros((3984, 2988), dtype=np.uint8) * 255,
                                (3984, 2988),
                                (3984, 2988),
                                directory='.',
                                donorMask=CompositeImage('a', 'b', 'image', result * 255),
                                pred_edges=None,
                                graph=None)
        # NOTE(review): the donor direction calls resize_transform here, not
        # seam_transform — confirm this is intentional and not a copy-paste slip.
        result = resize_transform(buildState).mask
        self.assertEqual((3984, 2988), result.shape)
        # The inverse mapping puts the block back at its original corner.
        self.assertEqual(255, result[201, 201])
        self.assertEqual(0, result[330, 50])
    def test_crop_transform(self):
        """crop_transform shifts a composite mask by the crop 'location' offset,
        then restores it on the donor path.

        NOTE(review): the edge op is u'TransformResize' although crop_transform
        is exercised — presumably crop_transform only reads 'location' and
        'shape change'; confirm the op string is irrelevant here.
        """
        edge = {u'maskname': u'Rotate_mask.png',
                u'inputmaskname': None,
                u'shape change': u'(-100, -100)',
                u'location': '50,50',
                'empty mask': 'no',
                u'arguments': {'interpolation': 'other'},
                u'op': u'TransformResize'}
        cm = np.zeros((3984, 2988), dtype=np.uint8)
        cm[25:75, 25:75] = 1
        buildState = BuildState(edge,
                                np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
                                np.random.randint(0, 255, (3884, 2888, 3), dtype=np.uint8),
                                np.zeros((3984, 2988), dtype=np.uint8),
                                (3984, 2988),
                                (3884, 2888),
                                directory='.',
                                compositeMask=CompositeImage('a', 'b', 'image', cm),
                                pred_edges=None,
                                graph=None)
        result = crop_transform(buildState).mask
        self.assertEqual((3884, 2888), result.shape)
        # Forward: the block at 25:75 shifts toward the origin by the crop offset.
        self.assertEqual(1, result[0, 0])
        self.assertEqual(0, result[26, 26])
        buildState = BuildState(edge,
                                np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
                                np.random.randint(0, 255, (3884, 2888, 3), dtype=np.uint8),
                                np.zeros((3984, 2988), dtype=np.uint8),
                                (3984, 2988),
                                (3884, 2888),
                                directory='.',
                                donorMask=CompositeImage('a', 'b', 'image', result),
                                pred_edges=None,
                                graph=None)
        result = crop_transform(buildState).mask
        self.assertEqual((3984, 2988), result.shape)
        # Donor: the block shifts back past the (50, 50) crop origin.
        self.assertEqual(0, result[0, 0])
        self.assertEqual(0, result[26, 26])
        self.assertEqual(1, result[51, 51])
    def test_select_crop_transform(self):
        """select_crop_frames: composite segments are re-based to the cropped frame
        range; donor segments are mapped back into the pre-crop timeline.

        NOTE(review): the op string u'SelectCropFramrs' looks like a typo for
        'SelectCropFrames' — left unchanged since it is runtime data; confirm
        against the operation registry.
        """
        edge = {u'maskname': u'Rotate_mask.png',
                u'inputmaskname': None,
                u'shape change': u'(0, 0)',
                'empty mask': 'no',
                u'arguments': {'interpolation': 'other',
                               'Start Time': 15,
                               'End Time': 25},
                u'op': u'SelectCropFramrs'}
        mask = video_tools.create_segment(
            starttime=0,
            startframe=1,
            endtime=2900,
            endframe=30,
            frames=30,
            rate=10,
            error=0,
            type='video')
        cm = CompositeImage('a', 'b', 'video', [mask])
        graph = Mock()
        buildState = BuildState(edge,
                                np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
                                np.random.randint(0, 255, (3884, 2888, 3), dtype=np.uint8),
                                np.zeros((3984, 2988), dtype=np.uint8),
                                (3984, 2988),
                                (3884, 2888),
                                directory='.',
                                compositeMask=cm,
                                pred_edges=None,
                                graph=graph)
        with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_composite:
            mock_composite.compositeMask = cm
            mock_composite.getMasksFromEdge.return_value = [video_tools.create_segment(**{
                'starttime': 1400,
                'startframe': 15,
                'endtime': 2400,
                'endframe': 25,
                'frames': 11,
                'type': 'video',
                'rate': 10
            })]
            result = select_crop_frames(mock_composite)
            # Composite: frames 15-25 of the original become frames 1-11 at time 0.
            self.assertEqual(1, len(result.videomasks))
            self.assertEqual(1, result.videomasks[0]['startframe'])
            self.assertEqual(11, result.videomasks[0]['endframe'])
            self.assertEqual(11, result.videomasks[0]['frames'])
            self.assertEqual(0.0, result.videomasks[0]['starttime'])
            self.assertEqual(1000.0, result.videomasks[0]['endtime'])
        buildState = BuildState(edge,
                                np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
                                np.random.randint(0, 255, (3884, 2888, 3), dtype=np.uint8),
                                np.zeros((3984, 2988), dtype=np.uint8),
                                (3984, 2988),
                                (3884, 2888),
                                directory='.',
                                donorMask=cm,
                                compositeMask=None,
                                pred_edges=None,
                                graph=graph)
        with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_donor:
            mock_donor.donorMask = cm
            mock_donor.isComposite = False
            mock_donor.getMasksFromEdge.return_value = [video_tools.create_segment(**{
                'starttime': 1400,
                'startframe': 15,
                'endtime': 2400,
                'endframe': 25,
                'frames': 11,
                'type': 'video',
                'rate': 10
            })]
            result = select_crop_frames(mock_donor)
            # Donor: the 30-frame mask is shifted back by the crop start (frame 15).
            self.assertEqual(1, len(result.videomasks))
            self.assertEqual(15, result.videomasks[0]['startframe'])
            self.assertEqual(44, result.videomasks[0]['endframe'])
            self.assertEqual(30, result.videomasks[0]['frames'])
            self.assertEqual(1400, result.videomasks[0]['starttime'])
            self.assertEqual(4300.0, result.videomasks[0]['endtime'])
    def test_copy_paste_frames_insert(self):
        """copy_paste_frames with add type 'insert': composite segments shift (or
        split) around the pasted frame range; an exactly-matching donor mask is
        dropped."""
        # copy into same spot
        edge = {u'maskname': u'Rotate_mask.png',
                u'inputmaskname': None,
                u'shape change': u'(0, 0)',
                'empty mask': 'no',
                u'arguments': {'interpolation': 'other',
                               'Dest Paste Time': 15,
                               'add type': 'insert',
                               'Number of Frames': 11,
                               'Start Time': 15,
                               'End Time': 25},
                u'op': u'CopyPaste'}
        mask = video_tools.create_segment(
            starttime=1400,
            startframe=15,
            endtime=2400,
            endframe=25,
            frames=11,
            rate=10,
            error=0,
            type='video')
        cm = CompositeImage('a', 'b', 'video', [mask])
        graph = Mock()
        buildState = BuildState(edge,
                                np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
                                np.random.randint(0, 255, (3884, 2888, 3), dtype=np.uint8),
                                np.zeros((3984, 2988), dtype=np.uint8),
                                (3984, 2988),
                                (3884, 2888),
                                directory='.',
                                compositeMask=cm,
                                pred_edges=None,
                                graph=graph)
        with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_composite:
            mock_composite.compositeMask = cm
            mock_composite.edge = edge
            mock_composite.getMasksFromEdge.return_value = [video_tools.create_segment(**{
                'starttime': 1400,
                'startframe': 15,
                'endtime': 2400,
                'endframe': 25,
                'frames': 11,
                'type': 'video',
                'rate': 10
            })]
            result = copy_paste_frames(mock_composite)
            # Inserting 11 frames at frame 15 pushes the composite segment to 26-36.
            self.assertEqual(1, len(result.videomasks))
            self.assertEqual(26, result.videomasks[0]['startframe'])
            self.assertEqual(36, result.videomasks[0]['endframe'])
            self.assertEqual(11, result.videomasks[0]['frames'])
            self.assertEqual(2500, result.videomasks[0]['starttime'])
            self.assertEqual(3500.0, result.videomasks[0]['endtime'])
        with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_donor:
            mock_donor.donorMask = cm
            mock_donor.edge = edge
            mock_donor.isComposite = False
            mock_donor.getMasksFromEdge.return_value = [video_tools.create_segment(**{
                'starttime': 1400,
                'startframe': 15,
                'endtime': 2400,
                'endframe': 25,
                'frames': 11,
                'type': 'video',
                'rate': 10
            })]
            result = copy_paste_frames(mock_donor)
            # The donor mask exactly matches the inserted range, so nothing remains.
            self.assertEqual(0, len(result.videomasks))
        edge = {u'maskname': u'Rotate_mask.png',
                u'inputmaskname': None,
                u'shape change': u'(0, 0)',
                'empty mask': 'no',
                u'arguments': {'interpolation': 'other',
                               'Dest Paste Time': 100,
                               'add type': 'insert',
                               'Number of Frames': 11,
                               'Start Time': 15,
                               'End Time': 25},
                u'op': u'CopyPaste'}
        mask = video_tools.create_segment(
            starttime=9000,
            startframe=91,
            endtime=15000,
            endframe=151,
            frames=61,
            rate=10,
            error=0,
            type='video')
        cm = CompositeImage('a', 'b', 'video', [mask])
        graph = Mock()
        # more complex, insert
        buildState = BuildState(edge,
                                np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
                                np.random.randint(0, 255, (3884, 2888, 3), dtype=np.uint8),
                                np.zeros((3984, 2988), dtype=np.uint8),
                                (3984, 2988),
                                (3884, 2888),
                                directory='.',
                                compositeMask=cm,
                                pred_edges=None,
                                graph=graph)
        with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_composite:
            mock_composite.compositeMask = cm
            mock_composite.edge = edge
            mock_composite.getMasksFromEdge.return_value = [video_tools.create_segment(**{
                'starttime': 9900,
                'startframe': 100,
                'endtime': 11300,
                'endframe': 114,
                'frames': 15,
                'type': 'video',
                'rate': 10
            })]
            result = copy_paste_frames(mock_composite)
            # Segment 91-151 is split by the 15-frame insert at frame 100:
            # 91-99 stays put, the remainder shifts to 115-166.
            self.assertEqual(2, len(result.videomasks))
            self.assertEqual(91, result.videomasks[0]['startframe'])
            self.assertEqual(99, result.videomasks[0]['endframe'])
            self.assertEqual(9, result.videomasks[0]['frames'])
            self.assertEqual(9000, result.videomasks[0]['starttime'])
            self.assertEqual(9800.0, result.videomasks[0]['endtime'])
            self.assertEqual(115, result.videomasks[1]['startframe'])
            self.assertEqual(166, result.videomasks[1]['endframe'])
            self.assertEqual(52, result.videomasks[1]['frames'])
            self.assertEqual(11400, result.videomasks[1]['starttime'])
            self.assertEqual(16500.0, result.videomasks[1]['endtime'])
        # more complex, drop
        with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_donor:
            mock_donor.donorMask = cm
            mock_donor.edge = edge
            mock_donor.isComposite = False
            mock_donor.getMasksFromEdge.return_value = [video_tools.create_segment(**{
                'starttime': 9900,
                'startframe': 100,
                'endtime': 11300,
                'endframe': 114,
                'frames': 15,
                'type': 'video',
                'rate': 10
            })]
            result = copy_paste_frames(mock_donor)
            # two because one was moved down...could combine them
            # but it matters little for our purposes.
            self.assertEqual(2, len(result.videomasks))
            self.assertEqual(91, result.videomasks[0]['startframe'])
            self.assertEqual(99, result.videomasks[0]['endframe'])
            self.assertEqual(9, result.videomasks[0]['frames'])
            self.assertEqual(9000, result.videomasks[0]['starttime'])
            self.assertEqual(9800.0, result.videomasks[0]['endtime'])
            self.assertEqual(100, result.videomasks[1]['startframe'])
            self.assertEqual(136, result.videomasks[1]['endframe'])
            self.assertEqual(37, result.videomasks[1]['frames'])
            self.assertEqual(9900, result.videomasks[1]['starttime'])
            self.assertEqual(13500.0, result.videomasks[1]['endtime'])
    def test_copy_paste_frames_replace(self):
        """copy_paste_frames with add type 'replace': the replaced range is removed
        from the composite (no frame shifting); donor masks pick up the copied
        source range. An 'insert' case is interleaved for contrast."""
        # copy into same spot
        edge = {u'maskname': u'Rotate_mask.png',
                u'inputmaskname': None,
                u'shape change': u'(0, 0)',
                'empty mask': 'no',
                u'arguments': {'interpolation': 'other',
                               'Dest Paste Time': 15,
                               'add type': 'replace',
                               'Number of Frames': 11,
                               'Select Start Time': 15},
                u'op': u'CopyPaste'}
        mask = video_tools.create_segment(
            starttime=1400,
            startframe=15,
            endtime=2400,
            endframe=25,
            frames=31,
            rate=10,
            error=0,
            type='video')
        cm = CompositeImage('a', 'b', 'video', [mask])
        graph = Mock()
        buildState = BuildState(edge,
                                np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
                                np.random.randint(0, 255, (3884, 2888, 3), dtype=np.uint8),
                                np.zeros((3984, 2988), dtype=np.uint8),
                                (3984, 2988),
                                (3884, 2888),
                                directory='.',
                                compositeMask=cm,
                                pred_edges=None,
                                graph=graph)
        with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_composite:
            mock_composite.compositeMask = cm
            mock_composite.edge = edge
            mock_composite.arguments.return_value = edge['arguments']
            mock_composite.getMasksFromEdge.return_value = [video_tools.create_segment(**{
                'starttime': 1400,
                'startframe': 15,
                'endtime': 2400,
                'endframe': 25,
                'frames': 11,
                'type': 'video',
                'rate': 10
            })]
            result = copy_paste_frames(mock_composite)
            # Replacement fully covers the composite segment, removing it.
            self.assertEqual(0, len(result.videomasks))
        with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_donor:
            mock_donor.donorMask = cm
            mock_donor.edge = edge
            mock_donor.isComposite = False
            mock_donor.arguments.return_value = edge['arguments']
            mock_donor.getMasksFromEdge.return_value = [video_tools.create_segment(**{
                'starttime': 1400,
                'startframe': 15,
                'endtime': 2400,
                'endframe': 25,
                'frames': 11,
                'type': 'video',
                'rate': 10
            })]
            result = copy_paste_frames(mock_donor)
            # Donor picks up the copied source range unchanged.
            self.assertEqual(1, len(result.videomasks))
            self.assertEqual(
                {'endframe': 25, 'rate': 10, 'starttime': 1400, 'frames': 11, 'startframe': 15, 'endtime': 2400,
                 'type': 'video'},
                result.videomasks[0])
        # Contrast case: 'insert' at frame 100 splits and shifts the segment.
        edge = {u'maskname': u'Rotate_mask.png',
                u'inputmaskname': None,
                u'shape change': u'(0, 0)',
                'empty mask': 'no',
                u'arguments': {'interpolation': 'other',
                               'Dest Paste Time': 100,
                               'add type': 'insert',
                               'Number of Frames': 11,
                               'Select Start Time': 15},
                u'op': u'CopyPaste'}
        mask = video_tools.create_segment(
            starttime=9000,
            startframe=91,
            endtime=15000,
            endframe=151,
            frames=61,
            rate=10,
            error=0,
            type='video')
        cm = CompositeImage('a', 'b', 'video', [mask])
        graph = Mock()
        # more complex, insert
        buildState = BuildState(edge,
                                np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
                                np.random.randint(0, 255, (3884, 2888, 3), dtype=np.uint8),
                                np.zeros((3984, 2988), dtype=np.uint8),
                                (3984, 2988),
                                (3884, 2888),
                                directory='.',
                                compositeMask=cm,
                                pred_edges=None,
                                graph=graph)
        with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_composite:
            mock_composite.compositeMask = cm
            mock_composite.edge = edge
            mock_composite.arguments.return_value = edge['arguments']
            mock_composite.getMasksFromEdge.return_value = [video_tools.create_segment(**{
                'starttime': 9900,
                'startframe': 100,
                'endtime': 11300,
                'endframe': 114,
                'frames': 15,
                'type': 'video',
                'rate': 10
            })]
            result = copy_paste_frames(mock_composite)
            # Insert splits 91-151 into 91-99 (unmoved) and 115-166 (shifted).
            self.assertEqual(2, len(result.videomasks))
            self.assertEqual(91, result.videomasks[0]['startframe'])
            self.assertEqual(99, result.videomasks[0]['endframe'])
            self.assertEqual(9, result.videomasks[0]['frames'])
            self.assertEqual(9000, result.videomasks[0]['starttime'])
            self.assertEqual(9800.0, result.videomasks[0]['endtime'])
            self.assertEqual(115, result.videomasks[1]['startframe'])
            self.assertEqual(166, result.videomasks[1]['endframe'])
            self.assertEqual(52, result.videomasks[1]['frames'])
            self.assertEqual(11400, result.videomasks[1]['starttime'])
            self.assertEqual(16500.0, result.videomasks[1]['endtime'])
        # more complex, drop
        with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_donor:
            mock_donor.donorMask = cm
            mock_donor.edge = edge
            mock_donor.arguments.return_value = edge['arguments']
            mock_donor.isComposite = False
            mock_donor.getMasksFromEdge.return_value = [video_tools.create_segment(**{
                'starttime': 9900,
                'startframe': 100,
                'endtime': 11300,
                'endframe': 114,
                'frames': 15,
                'type': 'video',
                'rate': 10
            })]
            result = copy_paste_frames(mock_donor)
            # two because one was moved down...could combine them
            # but it matters little for our purposes.
            self.assertEqual(2, len(result.videomasks))
            self.assertEqual(91, result.videomasks[0]['startframe'])
            self.assertEqual(99, result.videomasks[0]['endframe'])
            self.assertEqual(9, result.videomasks[0]['frames'])
            self.assertEqual(9000, result.videomasks[0]['starttime'])
            self.assertEqual(9800.0, result.videomasks[0]['endtime'])
            self.assertEqual(100, result.videomasks[1]['startframe'])
            self.assertEqual(136, result.videomasks[1]['endframe'])
            self.assertEqual(37, result.videomasks[1]['frames'])
            self.assertEqual(9900, result.videomasks[1]['starttime'])
            self.assertEqual(13500.0, result.videomasks[1]['endtime'])
        # REPLACE
        edge = {u'maskname': u'Rotate_mask.png',
                u'inputmaskname': None,
                u'shape change': u'(0, 0)',
                'empty mask': 'no',
                u'arguments': {'interpolation': 'other',
                               'Dest Paste Time': 100,
                               'add type': 'replace',
                               'Number of Frames': 11,
                               'Select Start Time': 15},
                u'op': u'CopyPaste'}
        mask = video_tools.create_segment(
            starttime=9000,
            startframe=91,
            endtime=15000,
            endframe=151,
            frames=61,
            rate=10,
            error=0,
            type='video')
        cm = CompositeImage('a', 'b', 'video', [mask])
        graph = Mock()
        # more complex, insert
        buildState = BuildState(edge,
                                np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
                                np.random.randint(0, 255, (3884, 2888, 3), dtype=np.uint8),
                                np.zeros((3984, 2988), dtype=np.uint8),
                                (3984, 2988),
                                (3884, 2888),
                                directory='.',
                                compositeMask=cm,
                                pred_edges=None,
                                graph=graph)
        with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_composite:
            mock_composite.compositeMask = cm
            mock_composite.edge = edge
            mock_composite.arguments.return_value = edge['arguments']
            mock_composite.getMasksFromEdge.return_value = [video_tools.create_segment(**{
                'starttime': 9900,
                'startframe': 100,
                'endtime': 11300,
                'endframe': 114,
                'frames': 15,
                'type': 'video',
                'rate': 10
            })]
            result = copy_paste_frames(mock_composite)
            # Replace removes frames 100-114 from the segment without shifting
            # the remainder's end frame.
            self.assertEqual(2, len(result.videomasks))
            self.assertEqual([{'endframe': 99, 'rate': 10, 'starttime': 9000, 'error': 0, 'frames': 9, 'startframe': 91,
                               'endtime': 9800.0, 'type': 'video'},
                              {'endframe': 151, 'rate': 10, 'starttime': 11400, 'error': 0, 'frames': 37,
                               'startframe': 115,
                               'endtime': 15000, 'type': 'video'}],
                             result.videomasks
                             )
        # more complex, drop
        with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_donor:
            mock_donor.donorMask = cm
            mock_donor.edge = edge
            mock_donor.arguments.return_value = edge['arguments']
            mock_donor.isComposite = False
            mock_donor.getMasksFromEdge.return_value = [video_tools.create_segment(**{
                'starttime': 1400,
                'startframe': 15,
                'endtime': 2400,
                'endframe': 25,
                'frames': 11,
                'type': 'video',
                'rate': 10
            })]
            result = copy_paste_frames(mock_donor)
            # two because one was moved down...could combine them
            # but it matters little for our purposes.
            self.assertEqual(2, len(result.videomasks))
            self.assertEqual([{'endframe': 25, 'rate': 10, 'starttime': 1400, 'frames': 11, 'startframe': 15,
                               'endtime': 2400, 'type': 'video'},
                              {'endframe': 151, 'rate': 10, 'starttime': 9000, 'error': 0, 'frames': 61,
                               'startframe': 91, 'endtime': 15000, 'type': 'video'}],
                             result.videomasks)
def test_paste_add_frames(self):
    """Exercise paste_add_frames for both composite and donor mask flows.

    Three scenarios are covered:
      1. insert with the inserted range equal to the existing mask segment
         (composite mask is pushed past the insertion; donor mask is emptied);
      2. insert in the middle of a larger mask segment (segment is split);
      3. the donor-side counterpart of the mid-segment insert.
    BuildState is mocked so only the rule function itself is under test.
    """
    # Edge metadata describing an 11-frame insert at frames 15-25.
    edge = {u'maskname': u'Rotate_mask.png',
            u'inputmaskname': None,
            u'shape change': u'(0, 0)',
            'empty mask': 'no',
            u'arguments': {'interpolation': 'other',
                           'add type': 'insert',
                           'Number of Frames': 11,
                           'Start Time': 15,
                           'End Time': 25},
            u'op': u'PasteAddFrames'}
    mask = video_tools.create_segment(
        starttime=1400,
        startframe=15,
        endtime=2400,
        endframe=25,
        frames=11,
        rate=10,
        error=0,
        type='video')
    cm = CompositeImage('a', 'b', 'video', [mask])
    graph = Mock()
    # Real BuildState instance used only as the spec for the mocks below.
    buildState = BuildState(edge,
                            np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
                            np.random.randint(0, 255, (3884, 2888, 3), dtype=np.uint8),
                            np.zeros((3984, 2988), dtype=np.uint8),
                            (3984, 2988),
                            (3884, 2888),
                            directory='.',
                            compositeMask=cm,
                            pred_edges=None,
                            graph=graph)
    with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_composite:
        mock_composite.compositeMask = cm
        mock_composite.edge = edge
        mock_composite.arguments.return_value = edge['arguments']
        mock_composite.getMasksFromEdge.return_value = [video_tools.create_segment(**{
            'starttime': 1400,
            'startframe': 15,
            'endtime': 2400,
            'endframe': 25,
            'frames': 11,
            'type': 'video',
            'rate': 10
        })]
        result = paste_add_frames(mock_composite)
        # Inserting 11 frames at 15 pushes the 15-25 mask to 26-36.
        self.assertEqual(1, len(result.videomasks))
        self.assertEqual(26, result.videomasks[0]['startframe'])
        self.assertEqual(36, result.videomasks[0]['endframe'])
        self.assertEqual(11, result.videomasks[0]['frames'])
        self.assertEqual(2500, result.videomasks[0]['starttime'])
        self.assertEqual(3500.0, result.videomasks[0]['endtime'])
    with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_donor:
        mock_donor.donorMask = cm
        mock_donor.edge = edge
        mock_donor.arguments.return_value = edge['arguments']
        mock_donor.isComposite = False
        mock_donor.getMasksFromEdge.return_value = [video_tools.create_segment(**{
            'starttime': 1400,
            'startframe': 15,
            'endtime': 2400,
            'endframe': 25,
            'frames': 11,
            'type': 'video',
            'rate': 10
        })]
        result = paste_add_frames(mock_donor)
        # Donor mask exactly matches the inserted frames, so nothing remains.
        self.assertEqual(0, len(result.videomasks))
    # Second scenario: edge inserts 15 frames (100-114) inside a 91-151 mask.
    edge = {u'maskname': u'Rotate_mask.png',
            u'inputmaskname': None,
            u'shape change': u'(0, 0)',
            'empty mask': 'no',
            u'arguments': {'interpolation': 'other',
                           'add type': 'insert',
                           'Number of Frames': 91,
                           'Start Time': 151,
                           'End Time': 61},
            u'op': u'CopyPaste'}
    mask = video_tools.create_segment(
        starttime=9000,
        startframe=91,
        endtime=15000,
        endframe=151,
        frames=61,
        rate=10,
        error=0,
        type='video')
    cm = CompositeImage('a', 'b', 'video', [mask])
    graph = Mock()
    # more complex, insert
    buildState = BuildState(edge,
                            np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
                            np.random.randint(0, 255, (3884, 2888, 3), dtype=np.uint8),
                            np.zeros((3984, 2988), dtype=np.uint8),
                            (3984, 2988),
                            (3884, 2888),
                            directory='.',
                            compositeMask=cm,
                            pred_edges=None,
                            graph=graph)
    with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_composite:
        mock_composite.compositeMask = cm
        mock_composite.edge = edge
        mock_composite.arguments.return_value = edge['arguments']
        mock_composite.getMasksFromEdge.return_value = [video_tools.create_segment(**{
            'starttime': 9900,
            'startframe': 100,
            'endtime': 11300,
            'endframe': 114,
            'frames': 15,
            'type': 'video',
            'rate': 10
        })]
        result = paste_add_frames(mock_composite)
        # Mask splits around the insertion: 91-99 stays, 100-151 shifts to 115-166.
        self.assertEqual(2, len(result.videomasks))
        self.assertEqual(91, result.videomasks[0]['startframe'])
        self.assertEqual(99, result.videomasks[0]['endframe'])
        self.assertEqual(9, result.videomasks[0]['frames'])
        self.assertEqual(9000, result.videomasks[0]['starttime'])
        self.assertEqual(9800.0, result.videomasks[0]['endtime'])
        self.assertEqual(115, result.videomasks[1]['startframe'])
        self.assertEqual(166, result.videomasks[1]['endframe'])
        self.assertEqual(52, result.videomasks[1]['frames'])
        self.assertEqual(11400, result.videomasks[1]['starttime'])
        self.assertEqual(16500.0, result.videomasks[1]['endtime'])
    # more complex, drop
    with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_donor:
        mock_donor.donorMask = cm
        mock_donor.edge = edge
        mock_donor.isComposite = False
        mock_donor.arguments.return_value = edge['arguments']
        mock_donor.getMasksFromEdge.return_value = [video_tools.create_segment(**{
            'starttime': 9900,
            'startframe': 100,
            'endtime': 11300,
            'endframe': 114,
            'frames': 15,
            'type': 'video',
            'rate': 10
        })]
        result = paste_add_frames(mock_donor)
        # two because one was moved down...could combine them
        # but it matters little for our purposes.
        self.assertEqual(2, len(result.videomasks))
        self.assertEqual(91, result.videomasks[0]['startframe'])
        self.assertEqual(99, result.videomasks[0]['endframe'])
        self.assertEqual(9, result.videomasks[0]['frames'])
        self.assertEqual(9000, result.videomasks[0]['starttime'])
        self.assertEqual(9800.0, result.videomasks[0]['endtime'])
        self.assertEqual(100, result.videomasks[1]['startframe'])
        self.assertEqual(136, result.videomasks[1]['endframe'])
        self.assertEqual(37, result.videomasks[1]['frames'])
        self.assertEqual(9900, result.videomasks[1]['starttime'])
        self.assertEqual(13500.0, result.videomasks[1]['endtime'])
def test_copy_paste_frames_replace(self):
    """Exercise copy_paste_frames for 'replace' and 'insert' add types.

    Scenarios:
      1. replace over the same frame range (composite mask drops entirely;
         donor mask survives unchanged);
      2. insert in the middle of a larger mask (segment split and shifted);
      3. replace in the middle of a larger mask (composite split; donor keeps
         both its own segment and the untouched source segment).
    BuildState is mocked so only the rule function itself is under test.

    Fix vs. original: the final expected dict contained a duplicate
    'error' key ({'error': 0, ..., 'error': 0}); the redundant duplicate
    (which silently overwrote the first in the literal) is removed.
    """
    # copy into same spot
    edge = {u'maskname': u'Rotate_mask.png',
            u'inputmaskname': None,
            u'shape change': u'(0, 0)',
            'empty mask': 'no',
            u'arguments': {'interpolation': 'other',
                           'Dest Paste Time': 15,
                           'add type': 'replace',
                           'Number of Frames': 11,
                           'Select Start Time': 15},
            u'op': u'CopyPaste'}
    mask = video_tools.create_segment(
        starttime=1400,
        startframe=15,
        endtime=2400,
        endframe=25,
        frames=31,
        rate=10,
        error=0,
        type='video')
    cm = CompositeImage('a', 'b', 'video', [mask])
    graph = Mock()
    buildState = BuildState(edge,
                            np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
                            np.random.randint(0, 255, (3884, 2888, 3), dtype=np.uint8),
                            np.zeros((3984, 2988), dtype=np.uint8),
                            (3984, 2988),
                            (3884, 2888),
                            directory='.',
                            compositeMask=cm,
                            pred_edges=None,
                            graph=graph)
    with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_composite:
        mock_composite.compositeMask = cm
        mock_composite.edge = edge
        mock_composite.arguments.return_value = edge['arguments']
        mock_composite.getMasksFromEdge.return_value = [video_tools.create_segment(**{
            'starttime': 1400,
            'startframe': 15,
            'endtime': 2400,
            'endframe': 25,
            'frames': 11,
            'type': 'video',
            'rate': 10
        })]
        result = copy_paste_frames(mock_composite)
        # Replaced frames exactly cover the composite mask: nothing remains.
        self.assertEqual(0, len(result.videomasks))
    with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_donor:
        mock_donor.donorMask = cm
        mock_donor.edge = edge
        mock_donor.isComposite = False
        mock_donor.arguments.return_value = edge['arguments']
        mock_donor.getMasksFromEdge.return_value = [video_tools.create_segment(**{
            'starttime': 1400,
            'startframe': 15,
            'endtime': 2400,
            'endframe': 25,
            'frames': 11,
            'type': 'video',
            'rate': 10
        })]
        result = copy_paste_frames(mock_donor)
        # Donor mask survives a same-range replace untouched.
        self.assertEqual(1, len(result.videomasks))
        self.assertEqual(
            {'endframe': 25, 'rate': 10, 'starttime': 1400, 'frames': 11, 'startframe': 15, 'endtime': 2400,
             'type': 'video', 'error': 0},
            result.videomasks[0])
    # Second scenario: insert 15 frames (100-114) inside a 91-151 mask.
    edge = {u'maskname': u'Rotate_mask.png',
            u'inputmaskname': None,
            u'shape change': u'(0, 0)',
            'empty mask': 'no',
            u'arguments': {'interpolation': 'other',
                           'Dest Paste Time': 100,
                           'add type': 'insert',
                           'Number of Frames': 11,
                           'Select Start Time': 15},
            u'op': u'CopyPaste'}
    mask = video_tools.create_segment(
        starttime=9000,
        startframe=91,
        endtime=15000,
        endframe=151,
        frames=61,
        rate=10,
        error=0,
        type='video')
    cm = CompositeImage('a', 'b', 'video', [mask])
    graph = Mock()
    # more complex, insert
    buildState = BuildState(edge,
                            np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
                            np.random.randint(0, 255, (3884, 2888, 3), dtype=np.uint8),
                            np.zeros((3984, 2988), dtype=np.uint8),
                            (3984, 2988),
                            (3884, 2888),
                            directory='.',
                            compositeMask=cm,
                            pred_edges=None,
                            graph=graph)
    with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_composite:
        mock_composite.compositeMask = cm
        mock_composite.edge = edge
        mock_composite.arguments.return_value = edge['arguments']
        mock_composite.getMasksFromEdge.return_value = [video_tools.create_segment(**{
            'starttime': 9900,
            'startframe': 100,
            'endtime': 11300,
            'endframe': 114,
            'frames': 15,
            'type': 'video',
            'rate': 10
        })]
        result = copy_paste_frames(mock_composite)
        # Mask splits: 91-99 stays, 100-151 shifts past the insert to 115-166.
        self.assertEqual(2, len(result.videomasks))
        self.assertEqual(91, result.videomasks[0]['startframe'])
        self.assertEqual(99, result.videomasks[0]['endframe'])
        self.assertEqual(9, result.videomasks[0]['frames'])
        self.assertEqual(9000, result.videomasks[0]['starttime'])
        self.assertEqual(9800.0, result.videomasks[0]['endtime'])
        self.assertEqual(115, result.videomasks[1]['startframe'])
        self.assertEqual(166, result.videomasks[1]['endframe'])
        self.assertEqual(52, result.videomasks[1]['frames'])
        self.assertEqual(11400, result.videomasks[1]['starttime'])
        self.assertEqual(16500.0, result.videomasks[1]['endtime'])
    # more complex, drop
    with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_donor:
        mock_donor.donorMask = cm
        mock_donor.edge = edge
        mock_donor.arguments.return_value = edge['arguments']
        mock_donor.isComposite = False
        mock_donor.getMasksFromEdge.return_value = [video_tools.create_segment(**{
            'starttime': 9900,
            'startframe': 100,
            'endtime': 11300,
            'endframe': 114,
            'frames': 15,
            'type': 'video',
            'rate': 10
        })]
        result = copy_paste_frames(mock_donor)
        # two because one was moved down...could combine them
        # but it matters little for our purposes.
        self.assertEqual(2, len(result.videomasks))
        self.assertEqual(91, result.videomasks[0]['startframe'])
        self.assertEqual(99, result.videomasks[0]['endframe'])
        self.assertEqual(9, result.videomasks[0]['frames'])
        self.assertEqual(9000, result.videomasks[0]['starttime'])
        self.assertEqual(9800.0, result.videomasks[0]['endtime'])
        self.assertEqual(100, result.videomasks[1]['startframe'])
        self.assertEqual(136, result.videomasks[1]['endframe'])
        self.assertEqual(37, result.videomasks[1]['frames'])
        self.assertEqual(9900, result.videomasks[1]['starttime'])
        self.assertEqual(13500.0, result.videomasks[1]['endtime'])
    # REPLACE
    edge = {u'maskname': u'Rotate_mask.png',
            u'inputmaskname': None,
            u'shape change': u'(0, 0)',
            'empty mask': 'no',
            u'arguments': {'interpolation': 'other',
                           'Dest Paste Time': 100,
                           'add type': 'replace',
                           'Number of Frames': 11,
                           'Select Start Time': 15},
            u'op': u'CopyPaste'}
    mask = video_tools.create_segment(
        starttime=9000,
        startframe=91,
        endtime=15000,
        endframe=151,
        frames=61,
        rate=10,
        error=0,
        type='video')
    cm = CompositeImage('a', 'b', 'video', [mask])
    graph = Mock()
    # more complex, insert
    buildState = BuildState(edge,
                            np.random.randint(0, 255, (3984, 2988, 3), dtype=np.uint8),
                            np.random.randint(0, 255, (3884, 2888, 3), dtype=np.uint8),
                            np.zeros((3984, 2988), dtype=np.uint8),
                            (3984, 2988),
                            (3884, 2888),
                            directory='.',
                            compositeMask=cm,
                            pred_edges=None,
                            graph=graph)
    with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_composite:
        mock_composite.compositeMask = cm
        mock_composite.edge = edge
        mock_composite.arguments.return_value = edge['arguments']
        mock_composite.getMasksFromEdge.return_value = [video_tools.create_segment(**{
            'starttime': 9900,
            'startframe': 100,
            'endtime': 11300,
            'endframe': 114,
            'frames': 15,
            'type': 'video',
            'rate': 10
        })]
        result = copy_paste_frames(mock_composite)
        self.assertEqual(2, len(result.videomasks))
        self.assertEqual([{'endframe': 99, 'rate': 10, 'starttime': 9000, 'error': 0, 'frames': 9, 'startframe': 91,
                           'endtime': 9800.0, 'type': 'video'},
                          {'endframe': 151, 'rate': 10, 'starttime': 11400, 'error': 0, 'frames': 37,
                           'startframe': 115,
                           'endtime': 15000, 'type': 'video'}],
                         result.videomasks
                         )
    # more complex, drop
    with patch('maskgen.mask_rules.BuildState', spec=buildState) as mock_donor:
        mock_donor.donorMask = cm
        mock_donor.edge = edge
        mock_donor.arguments.return_value = edge['arguments']
        mock_donor.isComposite = False
        mock_donor.getMasksFromEdge.return_value = [video_tools.create_segment(**{
            'starttime': 1400,
            'startframe': 15,
            'endtime': 2400,
            'endframe': 25,
            'frames': 11,
            'type': 'video',
            'rate': 10
        })]
        result = copy_paste_frames(mock_donor)
        # two because one was moved down...could combine them
        # but it matters little for our purposes.
        self.assertEqual(2, len(result.videomasks))
        self.assertEqual([{'endframe': 25, 'rate': 10, 'starttime': 1400, 'frames': 11, 'startframe': 15,
                           'endtime': 2400, 'type': 'video', 'error': 0},
                          {'endframe': 151, 'rate': 10, 'starttime': 9000, 'error': 0, 'frames': 61,
                           'startframe': 91, 'endtime': 15000, 'type': 'video'}],
                         result.videomasks)
def test_compositeIdAssigner(self):
    """Verify GraphCompositeIdAssigner gives consistent (group, bit) ids.

    Builds a 15-node DAG with recordInCompositeMask flags, fabricates probes
    with random target masks, then checks that:
      * every assigned bit number is positive,
      * a (groupid, bit number) pair maps to exactly one edge,
      * probes for the same edge in the same group share an id, while
        probes in different groups differ.

    Fix vs. original: deprecated assertEquals/assertNotEquals aliases
    (removed in Python 3.12) replaced with assertEqual/assertNotEqual.
    """
    G = nx.DiGraph(name="Empty")
    for i in xrange(1, 16):
        G.add_node(str(i), nodetype='base' if i == 1 else ('final' if i in [6, 7, 9, 10, 13] else 'intermediate'))
    G.add_edge('1', '2', op='OutputPng', recordInCompositeMask=True)
    G.add_edge('2', '3', op='TransformAffine', recordInCompositeMask=False)
    G.add_edge('2', '4', op='OutputPng', recordInCompositeMask=True)
    G.add_edge('3', '5', op='OutputPng', recordInCompositeMask=True)
    G.add_edge('5', '6', op='OutputPng', recordInCompositeMask=True)
    G.add_edge('5', '7', op='OutputPng', recordInCompositeMask=True)
    G.add_edge('4', '8', op='TransformResize', recordInCompositeMask=False)
    G.add_edge('8', '9', op='OutputPng', recordInCompositeMask=True)
    G.add_edge('8', '10', op='OutputPng', recordInCompositeMask=True)
    G.add_edge('1', '11', op='OutputPng', recordInCompositeMask=False)
    G.add_edge('11', '12', op='OutputPng', recordInCompositeMask=True)
    G.add_edge('12', '13', op='OutputPng', recordInCompositeMask=True)
    G.add_edge('5', '14', op='TransformResize', recordInCompositeMask=False)
    G.add_edge('14', '15', op='OutputPng', recordInCompositeMask=False)
    g = ImageGraphB(G)
    # Random binary target masks; 12x12 ones exercise a different shape.
    probe12branch1 = np.random.randint(0, 2, size=(10, 10))
    probe12branch2 = np.random.randint(0, 2, size=(10, 10))
    probe12branch3 = np.random.randint(0, 2, size=(12, 12))
    probe24branch2 = np.random.randint(0, 2, size=(10, 10))
    probe35 = np.random.randint(0, 2, size=(10, 10))
    probe35branch3 = np.random.randint(0, 2, size=(12, 12))
    probe56 = np.random.randint(0, 2, size=(10, 10))
    probe57 = np.random.randint(0, 2, size=(10, 10))
    probe89 = np.random.randint(0, 2, size=(10, 10))
    probe810 = np.random.randint(0, 2, size=(10, 10))
    probe1112 = np.random.randint(0, 2, size=(11, 11))
    probes = [Probe(('1', '2'), '10', '1', None, targetMaskImage=probe12branch2),
              Probe(('1', '2'), '9', '1', None, targetMaskImage=probe12branch2),
              Probe(('1', '2'), '6', '1', None, targetMaskImage=probe12branch1),
              Probe(('1', '2'), '7', '1', None, targetMaskImage=probe12branch1),
              Probe(('1', '2'), '15', '1', None, targetMaskImage=probe12branch3),
              Probe(('2', '4'), '9', '1', None, targetMaskImage=probe24branch2),
              Probe(('2', '4'), '10', '1', None, targetMaskImage=probe24branch2),
              Probe(('3', '5'), '6', '1', None, targetMaskImage=probe35),
              Probe(('3', '5'), '7', '1', None, targetMaskImage=probe35),
              Probe(('3', '5'), '15', '1', None, targetMaskImage=probe35branch3),
              Probe(('5', '6'), '6', '1', None, targetMaskImage=probe56),
              Probe(('5', '7'), '7', '1', None, targetMaskImage=probe57),
              Probe(('8', '9'), '9', '1', None, targetMaskImage=probe89),
              Probe(('8', '10'), '10', '1', None, targetMaskImage=probe810),
              Probe(('11', '12'), '13', '1', None, targetMaskImage=probe1112)
              ]
    graphCompositeIdAssigner = GraphCompositeIdAssigner(g)
    probes = graphCompositeIdAssigner.updateProbes(probes, 'builder')
    index = {}
    targets = {}
    for probe in probes:
        groupid = probe.composites['builder']['groupid']
        targetid = probe.composites['builder']['bit number']
        index[(probe.edgeId, probe.finalNodeId)] = (groupid, targetid)
        self.assertTrue(targetid > 0)
        if (groupid, targetid) not in targets:
            targets[(groupid, targetid)] = probe.edgeId
        else:
            # A (group, bit) pair must always refer back to the same edge.
            self.assertEqual(targets[(groupid, targetid)], probe.edgeId)
    self.assertEqual(index[(('1', '2'), '10')], index[(('1', '2'), '9')])
    self.assertEqual(index[(('2', '4'), '10')], index[(('2', '4'), '9')])
    self.assertNotEqual(index[(('1', '2'), '10')], index[(('1', '2'), '7')])
| 48.170898
| 120
| 0.468019
| 7,017
| 77,796
| 5.100328
| 0.048881
| 0.075023
| 0.026489
| 0.026824
| 0.924027
| 0.909386
| 0.896728
| 0.872587
| 0.85163
| 0.84431
| 0
| 0.094276
| 0.409893
| 77,796
| 1,614
| 121
| 48.200743
| 0.685305
| 0.012893
| 0
| 0.848445
| 0
| 0
| 0.114679
| 0.017425
| 0
| 0
| 0
| 0.00062
| 0.124421
| 1
| 0.01456
| false
| 0
| 0.002647
| 0.002647
| 0.021178
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3d19a223713b9747e07d034d09a89f12e573a4f7
| 99
|
py
|
Python
|
tests/BlazingSQLTest/Runner/__init__.py
|
Ethyling/blazingsql
|
973e868e5f0a80189b69e56090ef2dc26ac90aa1
|
[
"Apache-2.0"
] | 1,059
|
2019-08-05T13:14:42.000Z
|
2019-11-28T21:03:23.000Z
|
tests/BlazingSQLTest/Runner/__init__.py
|
ciusji/blazingsql
|
a35643d4c983334757eee96d5b9005b8b9fbd21b
|
[
"Apache-2.0"
] | 1,140
|
2019-11-30T00:36:17.000Z
|
2022-03-31T22:51:51.000Z
|
tests/BlazingSQLTest/Runner/__init__.py
|
ciusji/blazingsql
|
a35643d4c983334757eee96d5b9005b8b9fbd21b
|
[
"Apache-2.0"
] | 109
|
2019-12-13T08:31:43.000Z
|
2022-03-31T06:01:26.000Z
|
from .testCase import TestCase
from .testCase import ConfigTest
from .testSuites import TestSuites
| 24.75
| 34
| 0.848485
| 12
| 99
| 7
| 0.416667
| 0.285714
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 99
| 3
| 35
| 33
| 0.965517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3d31f72189e64788e46aa493643502860b20e319
| 238
|
py
|
Python
|
exarl/__init__.py
|
lanl/minRL
|
f935142479738de41bc93640edb6a3e3cb0778cc
|
[
"BSD-3-Clause"
] | null | null | null |
exarl/__init__.py
|
lanl/minRL
|
f935142479738de41bc93640edb6a3e3cb0778cc
|
[
"BSD-3-Clause"
] | 1
|
2021-09-24T17:48:51.000Z
|
2021-09-24T17:51:51.000Z
|
exarl/__init__.py
|
lanl/minRL
|
f935142479738de41bc93640edb6a3e3cb0778cc
|
[
"BSD-3-Clause"
] | 1
|
2021-09-24T17:50:59.000Z
|
2021-09-24T17:50:59.000Z
|
# import faulthandler; faulthandler.enable()
from exarl.base import ExaComm
from exarl.base import ExaAgent
from exarl.base import ExaEnv
from exarl.base import ExaWorkflow
from exarl.base import ExaLearner
from exarl.base import ExaData
| 29.75
| 44
| 0.836134
| 34
| 238
| 5.852941
| 0.352941
| 0.271357
| 0.39196
| 0.572864
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 238
| 7
| 45
| 34
| 0.947619
| 0.176471
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
181c5ef4825c7bff967997129dccabb8e9a8125e
| 120
|
py
|
Python
|
terminal pixy test.py
|
mr-finnie-mac/meadeor-drone
|
00413f569c782e511da803e007ba1f36f272df59
|
[
"BSD-3-Clause"
] | 1
|
2021-06-14T21:23:08.000Z
|
2021-06-14T21:23:08.000Z
|
terminal pixy test.py
|
mr-finnie-mac/meadeor-drone
|
00413f569c782e511da803e007ba1f36f272df59
|
[
"BSD-3-Clause"
] | null | null | null |
terminal pixy test.py
|
mr-finnie-mac/meadeor-drone
|
00413f569c782e511da803e007ba1f36f272df59
|
[
"BSD-3-Clause"
] | null | null | null |
# Terminal request test: run the pre-built Pixy demo binary.
import os

# Bug fix: `os.system("cd ...")` spawns a child shell whose working-directory
# change is discarded when it exits, so a subsequent os.system call could not
# find the binary. Chain both commands in one shell invocation instead.
os.system("cd ../build/get_blocks_cpp_demo/ && sudo ./get_blocks_cpp_demo")
| 24
| 45
| 0.766667
| 20
| 120
| 4.3
| 0.65
| 0.186047
| 0.27907
| 0.372093
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 120
| 5
| 46
| 24
| 0.781818
| 0.175
| 0
| 0
| 0
| 0
| 0.585859
| 0.505051
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
182e209a6470b3733bb2689218b558fc91bec476
| 752
|
py
|
Python
|
month01/all_code/day07/demo01.py
|
chaofan-zheng/tedu-python-demo
|
abe983ddc52690f4726cf42cc6390cba815026d8
|
[
"Apache-2.0"
] | 4
|
2021-01-07T14:25:15.000Z
|
2021-02-01T10:36:10.000Z
|
month01/all_code/day07/demo01.py
|
chaofan-zheng/tedu-python-demo
|
abe983ddc52690f4726cf42cc6390cba815026d8
|
[
"Apache-2.0"
] | null | null | null |
month01/all_code/day07/demo01.py
|
chaofan-zheng/tedu-python-demo
|
abe983ddc52690f4726cf42cc6390cba815026d8
|
[
"Apache-2.0"
] | null | null | null |
"""
for - for
外层循环执行1次 (控制行)
内层 多 (控制列)
"""
"""
print("老王", end=" ")
print("老王", end=" ")
print("老王", end=" ")
print("老王", end=" ")
print("老王", end=" ")
print() # 换行
print("老王", end=" ")
print("老王", end=" ")
print("老王", end=" ")
print("老王", end=" ")
print("老王", end=" ")
print() # 换行
"""
# for c in range(5):# 0 1 2 3 4
# print("老王", end=" ")
# print() # 换行
#
# for c in range(5):# 0 1 2 3 4
# print("老王", end=" ")
# print() # 换行
for r in range(6): # 0 1
for c in range(3): # 0 1 2 3 4 0 1 2 3 4
print("老王", end=" ")
print() # 换行
for r in range(5):
for c in range(6):
if r % 2 == 0:
print("#", end="")
else:
print("*", end="")
print()
| 18.8
| 47
| 0.414894
| 118
| 752
| 2.644068
| 0.194915
| 0.358974
| 0.416667
| 0.625
| 0.740385
| 0.724359
| 0.724359
| 0.724359
| 0.724359
| 0.724359
| 0
| 0.061753
| 0.332447
| 752
| 39
| 48
| 19.282051
| 0.559761
| 0.296543
| 0
| 0.181818
| 0
| 0
| 0.019763
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.454545
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
1868f52fd0956dfed7005ce3925687de19fcb044
| 7,598
|
py
|
Python
|
libkloudtrader/equities/trade.py
|
kloudtrader-github/libkloudtrader
|
abf5500e544e4f7b8834aacbd1dacf37ce11d023
|
[
"Apache-2.0"
] | null | null | null |
libkloudtrader/equities/trade.py
|
kloudtrader-github/libkloudtrader
|
abf5500e544e4f7b8834aacbd1dacf37ce11d023
|
[
"Apache-2.0"
] | null | null | null |
libkloudtrader/equities/trade.py
|
kloudtrader-github/libkloudtrader
|
abf5500e544e4f7b8834aacbd1dacf37ce11d023
|
[
"Apache-2.0"
] | null | null | null |
#Trading apis
#TODO:
'''
time_and_sales()
options and multileg order support
improve error and exception handling
'''
import sys
sys.path.append("..")
from time import sleep
import json
import requests
import os
from libkloudtrader.defaults import ACCESS_TOKEN,ACCOUNT_NUMBER
# Tradier REST endpoint roots: sandbox is the paper-trading environment,
# brokerage is the live account API, streaming serves market-data streams.
SANDBOX_API_URL="https://sandbox.tradier.com"
BROKERAGE_API_URL="https://api.tradier.com"
STREAMING_API_URL="https://stream.tradier.com"
def get_headers(access_token):
    """Return the standard HTTP headers for a Tradier API request.

    The API expects a JSON Accept header plus a Bearer token.
    """
    return {
        "Accept": "application/json",
        "Authorization": "Bearer " + access_token,
    }
'''Trading'''
#Equity
def buy_preview(symbol, quantity, access_token=ACCESS_TOKEN, account_number=ACCOUNT_NUMBER, duration="day", order_type="market", price=None, stop=None):
    """Preview (do not execute) an equity buy order.

    Args:
        symbol: ticker symbol (upper-cased before sending).
        quantity: number of shares.
        access_token, account_number: Tradier credentials; default to module config.
        duration: order lifetime, e.g. 'day' or 'gtc' (lower-cased before sending).
        order_type: 'market', 'limit', etc.
        price, stop: limit/stop prices for non-market orders.

    Returns:
        Decoded JSON response from the brokerage API.

    Raises:
        Exception: if the response body is not valid JSON.
    """
    post_params = {
        'class': 'equity',
        'symbol': str(symbol.upper()),
        'duration': str(duration.lower()),
        'side': 'buy',
        'quantity': str(quantity),
        'type': str(order_type.lower()),
        'price': price,
        'stop': stop,
        'preview': 'true'
    }
    r = requests.post(BROKERAGE_API_URL + "/v1/accounts/" + str(account_number) + "/orders/",
                      params=post_params, headers=get_headers(access_token))
    try:
        return r.json()
    except ValueError:  # narrowed from bare except: r.json() raises ValueError on bad JSON
        raise Exception("Did not receive any data. Status Code: %d" % r.status_code)
def buy_to_cover_preview(symbol, quantity, access_token=ACCESS_TOKEN, account_number=ACCOUNT_NUMBER, duration="day", order_type="market", price=None, stop=None):
    """Preview (do not execute) an equity buy-to-cover order.

    See buy_preview for parameter semantics; only the order side differs.

    Returns:
        Decoded JSON response from the brokerage API.

    Raises:
        Exception: if the response body is not valid JSON.
    """
    post_params = {
        'class': 'equity',
        'symbol': str(symbol.upper()),
        'duration': str(duration.lower()),
        'side': 'buy_to_cover',
        'quantity': str(quantity),
        'type': str(order_type.lower()),
        'price': price,
        'stop': stop,
        'preview': 'true'
    }
    r = requests.post(BROKERAGE_API_URL + "/v1/accounts/" + str(account_number) + "/orders/",
                      params=post_params, headers=get_headers(access_token))
    try:
        return r.json()
    except ValueError:  # narrowed from bare except: r.json() raises ValueError on bad JSON
        raise Exception("Did not receive any data. Status Code: %d" % r.status_code)
def sell_preview(symbol, quantity, access_token=ACCESS_TOKEN, account_number=ACCOUNT_NUMBER, duration="day", order_type="market", price=None, stop=None):
    """Preview (do not execute) an equity sell order.

    See buy_preview for parameter semantics; only the order side differs.

    Fix vs. original: the `price` and `stop` parameters were accepted but
    hard-coded to None in the request, unlike the sibling buy_preview; they
    are now forwarded (default None is backward-compatible).

    Returns:
        Decoded JSON response from the brokerage API.

    Raises:
        Exception: if the response body is not valid JSON.
    """
    post_params = {
        'class': 'equity',
        'symbol': str(symbol.upper()),
        'duration': str(duration.lower()),
        'side': 'sell',
        'quantity': str(quantity),
        'type': str(order_type.lower()),
        'price': price,
        'stop': stop,
        'preview': 'true'
    }
    r = requests.post(BROKERAGE_API_URL + "/v1/accounts/" + str(account_number) + "/orders/",
                      params=post_params, headers=get_headers(access_token))
    try:
        return r.json()
    except ValueError:  # narrowed from bare except: r.json() raises ValueError on bad JSON
        raise Exception("Did not receive any data. Status Code: %d" % r.status_code)
def sell_short_preview(symbol, quantity, access_token=ACCESS_TOKEN, account_number=ACCOUNT_NUMBER, duration="day", order_type="market", price=None, stop=None):
    """Preview (do not execute) an equity sell-short order.

    See buy_preview for parameter semantics; only the order side differs.

    Fix vs. original: `price` and `stop` were accepted but hard-coded to
    None in the request, unlike buy_preview; they are now forwarded
    (default None is backward-compatible).

    Returns:
        Decoded JSON response from the brokerage API.

    Raises:
        Exception: if the response body is not valid JSON.
    """
    post_params = {
        'class': 'equity',
        'symbol': str(symbol.upper()),
        'duration': str(duration.lower()),
        'side': 'sell_short',
        'quantity': str(quantity),
        'type': str(order_type.lower()),
        'price': price,
        'stop': stop,
        'preview': 'true'
    }
    r = requests.post(BROKERAGE_API_URL + "/v1/accounts/" + str(account_number) + "/orders/",
                      params=post_params, headers=get_headers(access_token))
    try:
        return r.json()
    except ValueError:  # narrowed from bare except: r.json() raises ValueError on bad JSON
        raise Exception("Did not receive any data. Status Code: %d" % r.status_code)
def buy(symbol, quantity, access_token=ACCESS_TOKEN, account_number=ACCOUNT_NUMBER, duration="day", order_type="market", price=None, stop=None):
    """Place an equity buy order.

    Args:
        symbol: ticker symbol (upper-cased before sending).
        quantity: number of shares.
        access_token, account_number: Tradier credentials; default to module config.
        duration: time for which the order will remain in effect ('day' or 'gtc').
        order_type: 'market', 'limit', etc.
        price, stop: limit/stop prices for non-market orders.

    Fix vs. original: str(price)/str(stop) turned the default None into the
    literal string "None" in the request; now None is left as-is so requests
    omits the parameter entirely.

    Returns:
        Decoded JSON response from the brokerage API.

    Raises:
        Exception: if the response body is not valid JSON.
    """
    post_params = {
        'class': 'equity',
        'symbol': str(symbol.upper()),
        'duration': str(duration.lower()),
        'side': 'buy',
        'quantity': str(quantity),
        'type': str(order_type.lower()),
        'price': str(price) if price is not None else None,
        'stop': str(stop) if stop is not None else None
    }
    r = requests.post(BROKERAGE_API_URL + "/v1/accounts/" + str(account_number) + "/orders/",
                      params=post_params, headers=get_headers(access_token))
    try:
        return r.json()
    except ValueError:  # narrowed from bare except: r.json() raises ValueError on bad JSON
        raise Exception("Did not receive any data. Status Code: %d" % r.status_code)
def buy_to_cover(symbol, quantity, access_token=ACCESS_TOKEN, account_number=ACCOUNT_NUMBER, duration="day", order_type="market", price=None, stop=None):
    """Place an equity buy-to-cover order (close a short position).

    See buy for parameter semantics; only the order side differs.

    Fix vs. original: str(price)/str(stop) turned the default None into the
    literal string "None"; None values are now omitted from the request.

    Returns:
        Decoded JSON response from the brokerage API.

    Raises:
        Exception: if the response body is not valid JSON.
    """
    post_params = {
        'class': 'equity',
        'symbol': str(symbol.upper()),
        'duration': str(duration.lower()),
        'side': 'buy_to_cover',
        'quantity': str(quantity),
        'type': str(order_type.lower()),
        'price': str(price) if price is not None else None,
        'stop': str(stop) if stop is not None else None
    }
    r = requests.post(BROKERAGE_API_URL + "/v1/accounts/" + str(account_number) + "/orders/",
                      params=post_params, headers=get_headers(access_token))
    try:
        return r.json()
    except ValueError:  # narrowed from bare except: r.json() raises ValueError on bad JSON
        raise Exception("Did not receive any data. Status Code: %d" % r.status_code)
def sell(symbol, quantity, access_token=ACCESS_TOKEN, account_number=ACCOUNT_NUMBER, duration="day", order_type="market", price=None, stop=None):
    """Place an equity sell order.

    See buy for parameter semantics; only the order side differs.

    Fix vs. original: str(price)/str(stop) turned the default None into the
    literal string "None"; None values are now omitted from the request.

    Returns:
        Decoded JSON response from the brokerage API.

    Raises:
        Exception: if the response body is not valid JSON.
    """
    post_params = {
        'class': 'equity',
        'symbol': str(symbol.upper()),
        'duration': str(duration.lower()),
        'side': 'sell',
        'quantity': str(quantity),
        'type': str(order_type.lower()),
        'price': str(price) if price is not None else None,
        'stop': str(stop) if stop is not None else None
    }
    r = requests.post(BROKERAGE_API_URL + "/v1/accounts/" + str(account_number) + "/orders/",
                      params=post_params, headers=get_headers(access_token))
    try:
        return r.json()
    except ValueError:  # narrowed from bare except: r.json() raises ValueError on bad JSON
        raise Exception("Did not receive any data. Status Code: %d" % r.status_code)
def sell_short(symbol, quantity, access_token=ACCESS_TOKEN, account_number=ACCOUNT_NUMBER, duration="day", order_type="market", price=None, stop=None):
    """Place an equity sell-short order.

    See buy for parameter semantics; only the order side differs.

    Fix vs. original: str(price)/str(stop) turned the default None into the
    literal string "None"; None values are now omitted from the request.

    Returns:
        Decoded JSON response from the brokerage API.

    Raises:
        Exception: if the response body is not valid JSON.
    """
    post_params = {
        'class': 'equity',
        'symbol': str(symbol.upper()),
        'duration': str(duration.lower()),
        'side': 'sell_short',
        'quantity': str(quantity),
        'type': str(order_type.lower()),
        'price': str(price) if price is not None else None,
        'stop': str(stop) if stop is not None else None
    }
    r = requests.post(BROKERAGE_API_URL + "/v1/accounts/" + str(account_number) + "/orders/",
                      params=post_params, headers=get_headers(access_token))
    try:
        return r.json()
    except ValueError:  # narrowed from bare except: r.json() raises ValueError on bad JSON
        raise Exception("Did not receive any data. Status Code: %d" % r.status_code)
def change_equity_order(order_id, access_token=ACCESS_TOKEN, account_number=ACCOUNT_NUMBER, duration="day", order_type="market", price=None, stop=None):
    """Modify an existing equity order.

    Args:
        order_id: id of the order to change.
        access_token, account_number: Tradier credentials; default to module config.
        duration, order_type, price, stop: new order attributes.

    Fixes vs. original: str(price)/str(stop) turned the default None into the
    literal string "None" (now omitted); `duration` is lower-cased for
    consistency with every other order function in this module.

    Returns:
        Decoded JSON response from the brokerage API.

    Raises:
        Exception: if the response body is not valid JSON.
    """
    put_params = {
        'order_id': order_id,
        'type': str(order_type.lower()),
        'duration': str(duration.lower()),
        'price': str(price) if price is not None else None,
        'stop': str(stop) if stop is not None else None
    }
    r = requests.put(BROKERAGE_API_URL + "/v1/accounts/" + str(account_number) + "/orders/" + str(order_id),
                     params=put_params, headers=get_headers(access_token))
    try:
        return r.json()
    except ValueError:  # narrowed from bare except: r.json() raises ValueError on bad JSON
        raise Exception("Did not receive any data. Status Code: %d" % r.status_code)
def cancel_equity_order(order_id, access_token=ACCESS_TOKEN, account_number=ACCOUNT_NUMBER):
    """Cancel an open equity order by id.

    Returns:
        Decoded JSON response from the brokerage API.

    Raises:
        Exception: if the response body is not valid JSON.
    """
    r = requests.delete(BROKERAGE_API_URL + "/v1/accounts/" + str(account_number) + "/orders/" + str(order_id),
                        headers=get_headers(access_token))
    try:
        return r.json()
    except ValueError:  # narrowed from bare except: r.json() raises ValueError on bad JSON
        raise Exception("Did not receive any data. Status Code: %d" % r.status_code)
| 35.839623
| 154
| 0.660042
| 989
| 7,598
| 4.893832
| 0.103134
| 0.075
| 0.040909
| 0.054545
| 0.893182
| 0.888843
| 0.888843
| 0.888843
| 0.882025
| 0.882025
| 0
| 0.001614
| 0.184391
| 7,598
| 211
| 155
| 36.009479
| 0.779409
| 0.057384
| 0
| 0.769697
| 0
| 0
| 0.189834
| 0
| 0
| 0
| 0
| 0.004739
| 0
| 1
| 0.066667
| false
| 0
| 0.036364
| 0
| 0.169697
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
43fb165cd6beb155049c36e40045cba97143760b
| 8,005
|
py
|
Python
|
tests/test_api.py
|
iliashevrin/redis-ordered-set
|
4c5c4398589c192c61b79a5a5f741c6c7a985579
|
[
"MIT"
] | 5
|
2018-01-04T09:34:30.000Z
|
2021-08-10T01:41:04.000Z
|
tests/test_api.py
|
iliashevrin/redis-ordered-set
|
4c5c4398589c192c61b79a5a5f741c6c7a985579
|
[
"MIT"
] | null | null | null |
tests/test_api.py
|
iliashevrin/redis-ordered-set
|
4c5c4398589c192c61b79a5a5f741c6c7a985579
|
[
"MIT"
] | null | null | null |
from rmtest import ModuleTestCase
from redis import ResponseError
import unittest
class OMTestCase(ModuleTestCase('../src/orderedset.so')):
def test_add_head(self):
    """os.addhead prepends new members; existing members move to the head.

    Fix vs. original: deprecated assertEquals alias (removed in
    Python 3.12) replaced with assertEqual.
    """
    c, s = self.client, self.server
    self.assertEqual(1, self.cmd('os.addhead', 'test', 'foo'))
    self.assertEqual(2, self.cmd('os.addhead', 'test', 'bar!', 'baz'))
    self.assertEqual(3, self.cmd('os.card', 'test'))
    self.assertEqual(['bar!', 'baz', 'foo'], self.cmd('os.members', 'test'))
    # 'foo' already exists, so only 'qux' counts as newly added.
    self.assertEqual(1, self.cmd('os.addhead', 'test', 'foo', 'qux'))
    self.assertEqual(['foo', 'qux', 'bar!', 'baz'], self.cmd('os.members', 'test'))
def test_add_tail(self):
    """os.addtail appends new members; existing members move to the tail.

    Fix vs. original: deprecated assertEquals alias replaced with assertEqual.
    """
    c, s = self.client, self.server
    self.assertEqual(1, self.cmd('os.addtail', 'test', 'foo'))
    self.assertEqual(2, self.cmd('os.addtail', 'test', 'bar!', 'baz'))
    self.assertEqual(3, self.cmd('os.card', 'test'))
    self.assertEqual(['foo', 'bar!', 'baz'], self.cmd('os.members', 'test'))
    # 'foo' already exists, so only 'qux' counts as newly added.
    self.assertEqual(1, self.cmd('os.addtail', 'test', 'foo', 'qux'))
    self.assertEqual(['bar!', 'baz', 'foo', 'qux'], self.cmd('os.members', 'test'))
def test_add_after(self):
    """os.addafter inserts members after a pivot; missing pivot yields None.

    Fix vs. original: deprecated assertEquals alias replaced with assertEqual.
    """
    c, s = self.client, self.server
    self.cmd('os.addhead', 'test', 'foo')
    self.assertEqual(1, self.cmd('os.addafter', 'test', 'foo', 'bar!'))
    self.assertEqual(2, self.cmd('os.card', 'test'))
    self.assertEqual(['foo', 'bar!'], self.cmd('os.members', 'test'))
    self.assertEqual(2, self.cmd('os.addafter', 'test', 'bar!', 'baz', 'baz', 'bar!', 'qux'))
    self.assertEqual(['foo', 'baz', 'bar!', 'qux'], self.cmd('os.members', 'test'))
    self.assertEqual(None, self.cmd('os.addafter', 'test', 'notexist', 'new'))
    # Moving an existing member counts as 0 new additions.
    self.assertEqual(0, self.cmd('os.addafter', 'test', 'qux', 'foo'))
    self.assertEqual(['baz', 'bar!', 'qux', 'foo'], self.cmd('os.members', 'test'))
    self.assertEqual(4, self.cmd('os.card', 'test'))
def test_add_before(self):
    """os.addbefore inserts members before a pivot; missing pivot yields None.

    Fix vs. original: deprecated assertEquals alias replaced with assertEqual.
    """
    c, s = self.client, self.server
    self.cmd('os.addtail', 'test', 'foo')
    self.assertEqual(1, self.cmd('os.addbefore', 'test', 'foo', 'bar!'))
    self.assertEqual(2, self.cmd('os.card', 'test'))
    self.assertEqual(['bar!', 'foo'], self.cmd('os.members', 'test'))
    self.assertEqual(2, self.cmd('os.addbefore', 'test', 'foo', 'baz', 'baz', 'foo', 'qux'))
    self.assertEqual(['bar!', 'baz', 'foo', 'qux'], self.cmd('os.members', 'test'))
    self.assertEqual(None, self.cmd('os.addbefore', 'test', 'notexist', 'new'))
    # Moving an existing member counts as 0 new additions.
    self.assertEqual(0, self.cmd('os.addbefore', 'test', 'bar!', 'qux'))
    self.assertEqual(['qux', 'bar!', 'baz', 'foo'], self.cmd('os.members', 'test'))
    self.assertEqual(4, self.cmd('os.card', 'test'))
def test_remove(self):
c, s = self.client, self.server
self.cmd('os.addhead', 'test', 'foo', 'bar!', 'baz', 'qux')
self.assertEquals(1, self.cmd('os.rem', 'test', 'bar!'))
self.assertEquals(3, self.cmd('os.card', 'test'))
self.assertEquals(2, self.cmd('os.rem', 'test', 'foo', 'baz', 'notexist'))
self.assertEquals(1, self.cmd('os.card', 'test'))
self.assertEquals(1, self.cmd('os.rem', 'test', 'qux', 'bar!'))
self.assertEquals(0, self.cmd('os.card', 'test'))
def test_rem_head(self):
c, s = self.client, self.server
self.cmd('os.addhead', 'test', 'foo', 'bar!', 'baz', 'qux')
self.assertEquals(1, self.cmd('os.remhead', 'test', 1))
self.assertEquals(['bar!','baz','qux'], self.cmd('os.members', 'test'))
self.assertEquals(1, self.cmd('os.remhead', 'test'))
self.assertEquals(2, self.cmd('os.remhead', 'test', 10))
self.assertEquals(0, self.cmd('os.card', 'test'))
def test_rem_tail(self):
c, s = self.client, self.server
self.cmd('os.addhead', 'test', 'foo', 'bar!', 'baz', 'qux')
self.assertEquals(1, self.cmd('os.remtail', 'test', 1))
self.assertEquals(['foo','bar!','baz'], self.cmd('os.members', 'test'))
self.assertEquals(1, self.cmd('os.remtail', 'test'))
self.assertEquals(2, self.cmd('os.remtail', 'test', 10))
self.assertEquals(0, self.cmd('os.card', 'test'))
def test_compare(self):
c, s = self.client, self.server
self.cmd('os.addhead', 'test', 'foo', 'bar!', 'baz')
self.assertEquals(-1, self.cmd('os.compare', 'test', 'foo', 'bar!'))
self.assertEquals(0, self.cmd('os.compare', 'test', 'bar!', 'bar!'))
self.assertEquals(1, self.cmd('os.compare', 'test', 'baz', 'bar!'))
self.assertEquals(None, self.cmd('os.compare', 'test', 'notexist', 'bar!'))
for _ in c.retry_with_rdb_reload():
self.assertEquals(-1, self.cmd('os.compare', 'test', 'foo', 'bar!'))
self.assertEquals(0, self.cmd('os.compare', 'test', 'bar!', 'bar!'))
self.assertEquals(1, self.cmd('os.compare', 'test', 'baz', 'bar!'))
self.assertEquals(None, self.cmd('os.compare', 'test', 'notexist', 'bar!'))
def test_next(self):
c, s = self.client, self.server
self.cmd('os.addhead', 'test', 'foo', 'bar!', 'baz', 'qux')
self.assertEquals(['bar!'], self.cmd('os.next', 'test', 'foo'))
self.assertEquals([], self.cmd('os.next', 'test', 'qux'))
self.assertEquals(['bar!','baz','qux'], self.cmd('os.next', 'test', 'foo', 3))
self.assertEquals(['bar!','baz','qux'], self.cmd('os.next', 'test', 'foo', 10))
self.assertEquals(['baz','qux'], self.cmd('os.next', 'test', 'bar!', 0))
self.assertEquals(['qux'], self.cmd('os.next', 'test', 'baz', 2))
self.assertEquals([], self.cmd('os.next', 'test', 'notexist', 1))
self.assertRaises(ResponseError, self.cmd, 'os.next', 'test', 'baz', 'invalid')
for _ in c.retry_with_rdb_reload():
self.assertEquals(['bar!','baz','qux'], self.cmd('os.next', 'test', 'foo', 3))
self.assertEquals(['qux'], self.cmd('os.next', 'test', 'baz', 2))
self.assertEquals(['baz','qux'], self.cmd('os.next', 'test', 'bar!', 0))
def test_prev(self):
c, s = self.client, self.server
self.cmd('os.addhead', 'test', 'foo', 'bar!', 'baz', 'qux')
self.assertEquals(['baz'], self.cmd('os.prev', 'test', 'qux'))
self.assertEquals([], self.cmd('os.prev', 'test', 'foo'))
self.assertEquals(['baz','bar!','foo'], self.cmd('os.prev', 'test', 'qux', 3))
self.assertEquals(['baz','bar!','foo'], self.cmd('os.prev', 'test', 'qux', 10))
self.assertEquals(['bar!','foo'], self.cmd('os.prev', 'test', 'baz', 0))
self.assertEquals(['foo'], self.cmd('os.prev', 'test', 'bar!', 2))
self.assertEquals([], self.cmd('os.prev', 'test', 'notexist', 1))
self.assertRaises(ResponseError, self.cmd, 'os.next', 'test', 'baz', 'invalid')
for _ in c.retry_with_rdb_reload():
self.assertEquals(['baz','bar!','foo'], self.cmd('os.prev', 'test', 'qux', 3))
self.assertEquals(['foo'], self.cmd('os.prev', 'test', 'bar!', 2))
self.assertEquals(['bar!','foo'], self.cmd('os.prev', 'test', 'baz', 0))
def test_head(self):
c, s = self.client, self.server
self.cmd('os.addhead', 'test', 'foo', 'bar!', 'baz', 'qux')
self.assertEquals(['foo'], self.cmd('os.head', 'test'))
self.assertEquals(['foo','bar!','baz'], self.cmd('os.head', 'test', 3))
self.assertEquals(['foo','bar!','baz', 'qux'], self.cmd('os.head', 'test', 10))
self.assertEquals(['foo','bar!','baz', 'qux'], self.cmd('os.head', 'test', 0))
self.assertRaises(ResponseError, self.cmd, 'os.head', 'test', 'invalid')
for _ in c.retry_with_rdb_reload():
self.assertEquals(['foo','bar!','baz'], self.cmd('os.head', 'test', 3))
self.assertEquals(['foo','bar!','baz', 'qux'], self.cmd('os.head', 'test', 0))
def test_tail(self):
c, s = self.client, self.server
self.cmd('os.addhead', 'test', 'foo', 'bar!', 'baz', 'qux')
self.assertEquals(['qux'], self.cmd('os.tail', 'test'))
self.assertEquals(['qux','baz','bar!'], self.cmd('os.tail', 'test', 3))
self.assertEquals(['qux','baz','bar!','foo'], self.cmd('os.tail', 'test', 10))
self.assertEquals(['qux','baz','bar!','foo'], self.cmd('os.tail', 'test', 0))
self.assertRaises(ResponseError, self.cmd, 'os.tail', 'test', 'invalid')
for _ in c.retry_with_rdb_reload():
self.assertEquals(['qux','baz','bar!'], self.cmd('os.tail', 'test', 3))
self.assertEquals(['qux','baz','bar!','foo'], self.cmd('os.tail', 'test', 0))
if __name__ == "__main__":
unittest.main()
| 44.97191
| 92
| 0.61649
| 1,161
| 8,005
| 4.211025
| 0.055125
| 0.143179
| 0.184087
| 0.073021
| 0.932502
| 0.898343
| 0.876662
| 0.839845
| 0.76846
| 0.732256
| 0
| 0.010367
| 0.108307
| 8,005
| 178
| 93
| 44.97191
| 0.674559
| 0
| 0
| 0.503704
| 0
| 0
| 0.258431
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0.088889
| false
| 0
| 0.022222
| 0
| 0.118519
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a10777e1b1e4221893b90b0d57af475947faeb42
| 189
|
py
|
Python
|
tests/unit/test_mf.py
|
irec-org/irec
|
a7ec8a53dcb6489c31f64d7192720baca50e0049
|
[
"MIT"
] | 2
|
2022-02-09T17:50:20.000Z
|
2022-02-09T17:50:22.000Z
|
tests/unit/test_mf.py
|
irec-org/irec
|
a7ec8a53dcb6489c31f64d7192720baca50e0049
|
[
"MIT"
] | 1
|
2022-03-16T15:29:03.000Z
|
2022-03-17T01:20:02.000Z
|
tests/unit/test_mf.py
|
irec-org/irec
|
a7ec8a53dcb6489c31f64d7192720baca50e0049
|
[
"MIT"
] | null | null | null |
from irec.recommendation.matrix_factorization.MF import MF
from irec.recommendation.matrix_factorization.NMF import NMF
def test_create_value_functions():
assert isinstance(NMF(), MF)
| 31.5
| 60
| 0.830688
| 25
| 189
| 6.08
| 0.6
| 0.105263
| 0.289474
| 0.368421
| 0.539474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 189
| 5
| 61
| 37.8
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
a142c964a23274b8ae700d8ad68b6196f33becef
| 2,875
|
py
|
Python
|
rplugin/python3/deoplete/sources/jira.py
|
balta2ar/deoplete-jira
|
ab34324cb9c45ba5dc5831842bee3462815d7891
|
[
"MIT"
] | null | null | null |
rplugin/python3/deoplete/sources/jira.py
|
balta2ar/deoplete-jira
|
ab34324cb9c45ba5dc5831842bee3462815d7891
|
[
"MIT"
] | null | null | null |
rplugin/python3/deoplete/sources/jira.py
|
balta2ar/deoplete-jira
|
ab34324cb9c45ba5dc5831842bee3462815d7891
|
[
"MIT"
] | null | null | null |
"""
This is RT source plugin for deoplete. It completes RequestTracker numbers from
a cache file.
# Install:
1. Copy the file to $HOME/.vim/bundle/deoplete.nvim/rplugin/python3/deoplete/sources/
2. pip install regex (https://pypi.python.org/pypi/regex supports cool fuzzy matching)
"""
from .base import Base
def log(msg):
with open('/tmp/deoplete-jira.log', 'a') as file_:
file_.write('%s\n' % msg)
from jira_rt_completion_server.jira_completer import JiraCompleter, JiraCompleterMatcherKey
class Source(Base):
def __init__(self, vim):
Base.__init__(self, vim)
self._completer = JiraCompleterMatcherKey('~/.cache/jira/jira.candidates.tsv')
#self.debug_enabled = True
self.name = 'jira'
#self.kind = 'keyword'
self.mark = '[JIRA]'
#self.min_pattern_length = 2
# Use these options if you want to filter candidates yourself
#self.is_volatile = True
#self.matchers = [] # ['matcher_cpsm']
#self.sorters = []
# Use these options if you want to implement custom matcher
#self.matchers = ['matcher_fuzzy', 'matcher_full_fuzzy']
#self.sorters = ['sorter_rank']
#self.converters = []
self.max_menu_width = 150
self.max_abbr_width = 150
self.input_pattern = self._completer.input_pattern #r'JI:?\w*$' #self._source.input_pattern
self.matcher_key = 'custom_key'
def get_complete_position(self, context):
return self._completer.get_complete_position(context)
def gather_candidates(self, context):
return self._completer.gather_candidates(context)
# class Source(Base):
# def __init__(self, vim):
# Base.__init__(self, vim)
#
# self._completer = JiraCompleter('~/.cache/jira/jira.candidates.tsv')
#
# self.debug_enabled = True
# self.name = 'jira'
# #self.kind = 'keyword'
# self.mark = '[JIRA]'
# #self.min_pattern_length = 2
#
# # Use these options if you want to filter candidates yourself
# self.is_volatile = True
# self.matchers = [] # ['matcher_cpsm']
# self.sorters = []
#
# # Use these options if you want to implement custom matcher
# #self.matchers = ['matcher_fuzzy', 'matcher_full_fuzzy']
# #self.sorters = ['sorter_rank']
# #self.converters = []
#
# self.max_menu_width = 150
# self.max_abbr_width = 150
# self.input_pattern = self._completer.input_pattern #r'JI:?\w*$' #self._source.input_pattern
#
# def get_complete_position(self, context):
# return self._completer.get_complete_position(context)
#
# def gather_candidates(self, context):
# return self._completer.gather_candidates(context)
#
# def on_post_filter(self, context):
# return self._completer.on_post_filter(context)
| 33.045977
| 101
| 0.649391
| 349
| 2,875
| 5.111748
| 0.309456
| 0.065583
| 0.047646
| 0.058857
| 0.737668
| 0.720852
| 0.720852
| 0.720852
| 0.720852
| 0.720852
| 0
| 0.007689
| 0.230957
| 2,875
| 86
| 102
| 33.430233
| 0.799186
| 0.641391
| 0
| 0
| 0
| 0
| 0.082816
| 0.056936
| 0
| 0
| 0
| 0
| 0
| 1
| 0.210526
| false
| 0
| 0.105263
| 0.105263
| 0.473684
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 7
|
a1861dfa2b218dbd1c5872acd336f1816508fe90
| 254
|
py
|
Python
|
trapper/common/testing_utils/pytest_fixtures/training.py
|
cemilcengiz/trapper
|
8233a444be388bace032bdd5fd5cf87a64424cd5
|
[
"MIT"
] | 36
|
2021-11-01T19:29:31.000Z
|
2022-02-25T15:19:08.000Z
|
trapper/common/testing_utils/pytest_fixtures/training.py
|
cemilcengiz/trapper
|
8233a444be388bace032bdd5fd5cf87a64424cd5
|
[
"MIT"
] | 7
|
2021-11-01T14:33:21.000Z
|
2022-03-22T09:01:36.000Z
|
trapper/common/testing_utils/pytest_fixtures/training.py
|
cemilcengiz/trapper
|
8233a444be388bace032bdd5fd5cf87a64424cd5
|
[
"MIT"
] | 4
|
2021-11-30T00:34:20.000Z
|
2022-03-31T21:06:30.000Z
|
import pytest
@pytest.fixture(scope="module")
def temp_output_dir(tmpdir_factory):
return str(tmpdir_factory.mktemp("outputs"))
@pytest.fixture(scope="module")
def temp_result_dir(tmpdir_factory):
return str(tmpdir_factory.mktemp("results"))
| 21.166667
| 48
| 0.771654
| 34
| 254
| 5.529412
| 0.5
| 0.276596
| 0.191489
| 0.255319
| 0.797872
| 0.797872
| 0.468085
| 0.468085
| 0
| 0
| 0
| 0
| 0.094488
| 254
| 11
| 49
| 23.090909
| 0.817391
| 0
| 0
| 0.285714
| 0
| 0
| 0.102362
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0.285714
| 0.714286
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
a19b62e2bd11aeb30ddf4f74ad713d089b5f4421
| 118
|
py
|
Python
|
packages/python/plotly/plotly/api/v1.py
|
sgn/plotly.py
|
587075c9f5a57a3dd60b03b2d47d925fbbb9b9b6
|
[
"MIT"
] | 11,750
|
2015-10-12T07:03:39.000Z
|
2022-03-31T20:43:15.000Z
|
packages/python/plotly/plotly/api/v1.py
|
sgn/plotly.py
|
587075c9f5a57a3dd60b03b2d47d925fbbb9b9b6
|
[
"MIT"
] | 2,951
|
2015-10-12T00:41:25.000Z
|
2022-03-31T22:19:26.000Z
|
packages/python/plotly/plotly/api/v1.py
|
sgn/plotly.py
|
587075c9f5a57a3dd60b03b2d47d925fbbb9b9b6
|
[
"MIT"
] | 2,623
|
2015-10-15T14:40:27.000Z
|
2022-03-28T16:05:50.000Z
|
from __future__ import absolute_import
from _plotly_future_ import _chart_studio_error
_chart_studio_error("api.v1")
| 23.6
| 47
| 0.872881
| 17
| 118
| 5.235294
| 0.588235
| 0.269663
| 0.359551
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009259
| 0.084746
| 118
| 4
| 48
| 29.5
| 0.814815
| 0
| 0
| 0
| 0
| 0
| 0.050847
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
62e052a2ae9b60eb0bce5e804a517df3f55fe3ed
| 5,394
|
py
|
Python
|
pynos/versions/ver_7/ver_7_1_0/yang/brocade_clock.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 12
|
2015-09-21T23:56:09.000Z
|
2018-03-30T04:35:32.000Z
|
pynos/versions/ver_7/ver_7_1_0/yang/brocade_clock.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 10
|
2016-09-15T19:03:27.000Z
|
2017-07-17T23:38:01.000Z
|
pynos/versions/ver_7/ver_7_1_0/yang/brocade_clock.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 6
|
2015-08-14T08:05:23.000Z
|
2022-02-03T15:33:54.000Z
|
#!/usr/bin/env python
import xml.etree.ElementTree as ET
class brocade_clock(object):
"""Auto generated class.
"""
def __init__(self, **kwargs):
self._callback = kwargs.pop('callback')
def clock_sa_clock_timezone(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
clock_sa = ET.SubElement(config, "clock-sa", xmlns="urn:brocade.com:mgmt:brocade-clock")
clock = ET.SubElement(clock_sa, "clock")
timezone = ET.SubElement(clock, "timezone")
timezone.text = kwargs.pop('timezone')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_clock_input_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_clock = ET.Element("show_clock")
config = show_clock
input = ET.SubElement(show_clock, "input")
rbridge_id = ET.SubElement(input, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_clock_output_clock_time_rbridge_id_out(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_clock = ET.Element("show_clock")
config = show_clock
output = ET.SubElement(show_clock, "output")
clock_time = ET.SubElement(output, "clock-time")
rbridge_id_out = ET.SubElement(clock_time, "rbridge-id-out")
rbridge_id_out.text = kwargs.pop('rbridge_id_out')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_clock_output_clock_time_current_time(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_clock = ET.Element("show_clock")
config = show_clock
output = ET.SubElement(show_clock, "output")
clock_time = ET.SubElement(output, "clock-time")
current_time = ET.SubElement(clock_time, "current-time")
current_time.text = kwargs.pop('current_time')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_clock_output_clock_time_timezone(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_clock = ET.Element("show_clock")
config = show_clock
output = ET.SubElement(show_clock, "output")
clock_time = ET.SubElement(output, "clock-time")
timezone = ET.SubElement(clock_time, "timezone")
timezone.text = kwargs.pop('timezone')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def clock_sa_clock_timezone(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
clock_sa = ET.SubElement(config, "clock-sa", xmlns="urn:brocade.com:mgmt:brocade-clock")
clock = ET.SubElement(clock_sa, "clock")
timezone = ET.SubElement(clock, "timezone")
timezone.text = kwargs.pop('timezone')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_clock_input_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_clock = ET.Element("show_clock")
config = show_clock
input = ET.SubElement(show_clock, "input")
rbridge_id = ET.SubElement(input, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_clock_output_clock_time_rbridge_id_out(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_clock = ET.Element("show_clock")
config = show_clock
output = ET.SubElement(show_clock, "output")
clock_time = ET.SubElement(output, "clock-time")
rbridge_id_out = ET.SubElement(clock_time, "rbridge-id-out")
rbridge_id_out.text = kwargs.pop('rbridge_id_out')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_clock_output_clock_time_current_time(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_clock = ET.Element("show_clock")
config = show_clock
output = ET.SubElement(show_clock, "output")
clock_time = ET.SubElement(output, "clock-time")
current_time = ET.SubElement(clock_time, "current-time")
current_time.text = kwargs.pop('current_time')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_clock_output_clock_time_timezone(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_clock = ET.Element("show_clock")
config = show_clock
output = ET.SubElement(show_clock, "output")
clock_time = ET.SubElement(output, "clock-time")
timezone = ET.SubElement(clock_time, "timezone")
timezone.text = kwargs.pop('timezone')
callback = kwargs.pop('callback', self._callback)
return callback(config)
| 37.2
| 96
| 0.621246
| 616
| 5,394
| 5.215909
| 0.068182
| 0.112045
| 0.084034
| 0.074697
| 0.959228
| 0.959228
| 0.959228
| 0.959228
| 0.959228
| 0.959228
| 0
| 0
| 0.255284
| 5,394
| 145
| 97
| 37.2
| 0.799851
| 0.062477
| 0
| 0.959184
| 1
| 0
| 0.127149
| 0.013595
| 0
| 0
| 0
| 0
| 0
| 1
| 0.112245
| false
| 0
| 0.010204
| 0
| 0.234694
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a7fb8b1849ad85e60169c576036ab84aa31cb9cb
| 114
|
py
|
Python
|
oleh/__init__.py
|
juan-fdz-hawa/oleh
|
06bca04b27ed830590e2ddcc30f012ada5657b76
|
[
"MIT"
] | null | null | null |
oleh/__init__.py
|
juan-fdz-hawa/oleh
|
06bca04b27ed830590e2ddcc30f012ada5657b76
|
[
"MIT"
] | null | null | null |
oleh/__init__.py
|
juan-fdz-hawa/oleh
|
06bca04b27ed830590e2ddcc30f012ada5657b76
|
[
"MIT"
] | null | null | null |
from oleh.unpacker import Unpacker
def unpack(ole_object_bytes):
return Unpacker(ole_object_bytes).unpack()
| 19
| 46
| 0.798246
| 16
| 114
| 5.4375
| 0.625
| 0.206897
| 0.321839
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122807
| 114
| 5
| 47
| 22.8
| 0.87
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
c514d768de0ebdefad35e2f9d00ce8882d61c877
| 289
|
py
|
Python
|
src/grokui/admin/__init__.py
|
zopefoundation/grokui.admin
|
7ea19321de7a7fd67667b97ace10d67bd1799376
|
[
"ZPL-2.1"
] | null | null | null |
src/grokui/admin/__init__.py
|
zopefoundation/grokui.admin
|
7ea19321de7a7fd67667b97ace10d67bd1799376
|
[
"ZPL-2.1"
] | 2
|
2018-10-31T08:17:47.000Z
|
2022-03-16T07:29:42.000Z
|
src/grokui/admin/__init__.py
|
zopefoundation/grokui.admin
|
7ea19321de7a7fd67667b97ace10d67bd1799376
|
[
"ZPL-2.1"
] | null | null | null |
##############################################################################
from grokui.admin.interfaces import ISecurityNotifier
from grokui.admin.utilities import getURLWithParams, getVersion
from grokui.admin.utilities import (
TimeoutableHTTPConnection, TimeoutableHTTPHandler)
| 48.166667
| 78
| 0.626298
| 20
| 289
| 9.05
| 0.55
| 0.165746
| 0.248619
| 0.265193
| 0.331492
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069204
| 289
| 5
| 79
| 57.8
| 0.672862
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3d70eed6f10679229a9bc09d71460e3a6bd13ce8
| 18,720
|
py
|
Python
|
test/wrapper/system_tests/multiport/multiport.py
|
SRCH2/srch2-ngn
|
925f36971aa6a8b31cdc59f7992790169e97ee00
|
[
"BSD-3-Clause"
] | 14
|
2016-01-15T20:26:54.000Z
|
2018-11-26T20:47:43.000Z
|
test/wrapper/system_tests/multiport/multiport.py
|
SRCH2/srch2-ngn
|
925f36971aa6a8b31cdc59f7992790169e97ee00
|
[
"BSD-3-Clause"
] | 2
|
2016-04-26T05:29:01.000Z
|
2016-05-07T00:13:38.000Z
|
test/wrapper/system_tests/multiport/multiport.py
|
SRCH2/srch2-ngn
|
925f36971aa6a8b31cdc59f7992790169e97ee00
|
[
"BSD-3-Clause"
] | 7
|
2016-02-27T11:35:59.000Z
|
2018-11-26T20:47:59.000Z
|
#! /usr/bin/python
# Test case to test multi-port functionality
# The configuration file for this test case specifies 2 different cores, each with a different
# data source. Three search terms are tested, each expected to be returned by one and only one
# of the cores. The usual syntax of the queriesAndResults.txt file has been extended to the
# following format:
# <search-term>||<core1 ID result set>@<core2 ID result set>@<core3 ID result set>
# where each ID result set is a space separated list of record IDs expected from the server.
# Specifically:
#
# Global ports:
# /info -> 8088
# /[other entrypoints] -> 8087
#
# Core 1: Movies, using global ports
# /info -> 8088
# /[other entrypoints] -> 8087
#
# Core 2: StackOverflow data
# /save -> 9087
# /export -> 9087
# /resetLogger -> 9087
# /docs -> 9087
# /update -> 9087
#
# In the test case, we send HTTP requests to those core-ports. Based on the configuration, some of
# the requests should succeed, and some should fail.
#
import sys, urllib2, json, time, subprocess, os, commands, signal, re
sys.path.insert(0, 'srch2lib')
import test_lib
port = '8087' # core1
core1InfoPort = '8088' # core1 - /info
core2ControlPort = '9087' # core2 - all the control messages
#Function of checking the results
def checkResult(query, responseJson,resultValue):
# for key, value in responseJson:
# print key, value
isPass=1
if len(responseJson) == len(resultValue):
for i in range(0, len(resultValue)):
#print response_json['results'][i]['record']['id']
if (resultValue.count(responseJson[i]['record']['id']) != 1):
isPass=0
print query+' test failed'
print 'query results||given results'
print 'number of results:'+str(len(responseJson))+'||'+str(len(resultValue))
for i in range(0, len(responseJson)):
print str(responseJson[i]['record']['id']) + '||' + resultValue[i]
break
else:
isPass=0
print query+' test failed - differing response lengths'
print 'query results||given results'
print 'number of results:'+str(len(responseJson))+'||'+str(len(resultValue))
maxLen = max(len(responseJson),len(resultValue))
for i in range(0, maxLen):
if i >= len(resultValue):
print str(responseJson[i]['record']['id'])+'||'
elif i >= len(responseJson):
print ' '+'||'+resultValue[i]
else:
print responseJson[i]['record']['id']+'||'+resultValue[i]
if isPass == 1:
print query+' test pass'
return 0
return 1
#prepare the query based on the valid syntax
def prepareQuery(queryKeywords, fuzzy):
query = ''
################# prepare main query part
query = query + 'q='
# local parameters
# query = query + '%7BdefaultPrefixComplete=COMPLETE%7D'
# keywords section
for i in range(0, len(queryKeywords)):
if fuzzy:
keyword = queryKeywords[i] + '~'
else:
keyword = queryKeywords[i]
if i == (len(queryKeywords)-1):
query=query+keyword # last keyword prefix
else:
query=query+keyword+'%20AND%20'
# print 'Query : ' + query
##################################
return query
def testMultipleCores(queriesAndResultsPath, binary_path):
if test_lib.confirmPortAvailable(port) == False:
print 'Port ' + str(port) + ' already in use - aborting'
return -1
#Start the engine server
args = [ binary_path, '--config-file=./multiport/conf-multiport.xml' ]
if test_lib.confirmPortAvailable(port) == False:
print 'Port ' + str(port) + ' already in use - aborting'
return -1
print 'starting engine: ' + args[0] + ' ' + args[1]
serverHandle = test_lib.startServer(args)
test_lib.pingServer(port)
failCount = 0
#######################################
# Basic multi-core functional testing #
#######################################
print "Test suite #1 - basic multi-core functionality"
f_in = open(queriesAndResultsPath, 'r')
for line in f_in:
#get the query keyword and results
value=line.split('||')
queryValue=value[0].split()
allResults=value[1].split('@')
coreNum=0
for coreResult in allResults:
resultValue=coreResult.split()
#construct the query
if coreNum == 0:
# test default core (unnamed core) on 0th iteration
query='http://localhost:' + port + '/search?'
else:
query='http://localhost:' + port + '/core' + str(coreNum) + '/search?'
query = query + prepareQuery(queryValue, False)
#do the query
response = urllib2.urlopen(query).read()
#print query + ' Got ==> ' + response
response_json = json.loads(response)
#check the result
failCount += checkResult(query, response_json['results'], resultValue)
coreNum += 1
f_in.close()
print "\nTest suite #2: Port security"
# Test if /info is indeed moved to another port
query='http://localhost:' + core1InfoPort + '/info'
#do the query
#print query
response = urllib2.urlopen(query).read()
#print response
response_json = json.loads(response)
if len(response_json) > 0:
if int(response_json['engine_status']['docs_in_index']) != 244:
failCount += 1
print "Info request did not return expected document count: Got " + str(response_json['engine_status']['docs_in_index']) + " but expected 244."
else:
print query + ' test pass'
else:
failCount += 1
print "Null response to info request"
# Test if /info is no longer on standard port (negative test)
query='http://localhost:' + port + '/info'
#do the query
#print query
try:
response = urllib2.urlopen(query).read()
#print response
response_json = json.loads(response)
except urllib2.HTTPError as err:
if err.code == 404:
print query + ' test pass'
else:
# did not get expected file not found error
failcount += 1
raise
# Test if /search is not allowed in the /info port
query='http://localhost:' + core1InfoPort + '/search?q=foo'
#do the query
#print query
try:
response = urllib2.urlopen(query).read()
#print response
response_json = json.loads(response)
except urllib2.HTTPError as err:
if err.code == 404:
print query + ' test pass'
else:
# did not get expected file not found error
failcount += 1
raise
# Same tests but with core1 explicitly in the path
# Test if /core1/info is indeed moved to another port
query='http://localhost:' + core1InfoPort + '/core1/info'
#do the query
#print query
response = urllib2.urlopen(query).read()
#print response
response_json = json.loads(response)
if len(response_json) > 0:
if int(response_json['engine_status']['docs_in_index']) != 244:
failCount += 1
print "Info request did not return expected document count: Got " + str(response_json['engine_status']['docs_in_index']) + " but expected 244."
else:
print query + ' test pass'
else:
failCount += 1
print "Null response to info request"
# Test if /core1/info is no longer on standard port (negative test)
query='http://localhost:' + port + '/core1/info'
#do the query
#print query
try:
response = urllib2.urlopen(query).read()
#print response
response_json = json.loads(response)
except urllib2.HTTPError as err:
if err.code == 404:
print query + ' test pass'
else:
# did not get expected file not found error
failcount += 1
raise
# Test if /search is not allowed in the /core1/info port
query='http://localhost:' + core1InfoPort + '/core1/search?q=foo'
#do the query
#print query
try:
response = urllib2.urlopen(query).read()
#print response
response_json = json.loads(response)
except urllib2.HTTPError as err:
if err.code == 404:
print query + ' test pass'
else:
# did not get expected file not found error
failcount += 1
raise
# Test if /core2/info is not allowed in the /core1/info port
query='http://localhost:' + core1InfoPort + '/core2/info'
#do the query
#print query
try:
response = urllib2.urlopen(query).read()
#print response
response_json = json.loads(response)
except urllib2.HTTPError as err:
if err.code == 404:
print query + ' test pass'
else:
# did not get expected file not found error
failcount += 1
raise
print "\nTest suite #3: Control Port security"
# /save test
query='http://localhost:' + core2ControlPort + '/core2/save'
opener = urllib2.build_opener(urllib2.HTTPHandler)
request = urllib2.Request(query, '')
#request.add_header('Content-Type', 'your/contenttype')
request.get_method = lambda: 'PUT'
#do the query
#print query
response = opener.open(request).read()
# response = urllib2.urlopen(request).read()
#print response
response_json = json.loads(response)
if len(response_json) > 0:
if response_json['log'][0]['save'] != 'success':
failCount += 1
print "/save request did not return success"
else:
print query + ' test pass'
else:
failCount += 1
print "Null response to info request"
# /export
query='http://localhost:' + core2ControlPort + '/core2/export?exported_data_file=core2-exported.json'
opener = urllib2.build_opener(urllib2.HTTPHandler)
request = urllib2.Request(query, '')
#request.add_header('Content-Type', 'your/contenttype')
request.get_method = lambda: 'PUT'
#do the query
#print query
response = opener.open(request).read()
# response = urllib2.urlopen(request).read()
#print response
response_json = json.loads(response)
if len(response_json) > 0:
if response_json['log'][0]['export'] != 'success':
failCount += 1
print "/export request did not return success"
else:
print query + ' test pass'
else:
failCount += 1
print "Null response to save request"
# /resetLogger test
query='http://localhost:' + core2ControlPort + '/core2/resetLogger'
opener = urllib2.build_opener(urllib2.HTTPHandler)
request = urllib2.Request(query, '')
#request.add_header('Content-Type', 'your/contenttype')
request.get_method = lambda: 'PUT'
#do the query
#print query
response = opener.open(request).read()
# response = urllib2.urlopen(request).read()
#print response
response_json = json.loads(response)
if len(response_json) > 0:
if response_json['log']:
print query + ' test pass'
else:
failCount += 1
print "/resetLogger request did not return success"
else:
failCount += 1
print "Null response to resetLogger request"
# /core2/save on protected port test
query='http://localhost:' + port + '/core2/save'
opener = urllib2.build_opener(urllib2.HTTPHandler)
request = urllib2.Request(query, '')
#request.add_header('Content-Type', 'your/contenttype')
request.get_method = lambda: 'PUT'
#do the query
#print query
try:
response = opener.open(request).read()
#print response
response_json = json.loads(response)
except urllib2.HTTPError as err:
if err.code == 404:
print query + ' test pass'
else:
# did not get expected file not found error
failcount += 1
raise
# /core2/export on protected port test
query='http://localhost:' + port + '/core2/export?exported_data_file=core2-exported.json'
opener = urllib2.build_opener(urllib2.HTTPHandler)
request = urllib2.Request(query, '')
#request.add_header('Content-Type', 'your/contenttype')
request.get_method = lambda: 'PUT'
#do the query
#print query
try:
response = opener.open(request).read()
#print response
response_json = json.loads(response)
except urllib2.HTTPError as err:
if err.code == 404:
print query + ' test pass'
else:
# did not get expected file not found error
failcount += 1
raise
# /core2/resetLogger on protected port test
query='http://localhost:' + port + '/core2/resetLogger'
opener = urllib2.build_opener(urllib2.HTTPHandler)
request = urllib2.Request(query, '')
#request.add_header('Content-Type', 'your/contenttype')
request.get_method = lambda: 'PUT'
#do the query
#print query
try:
response = opener.open(request).read()
#print response
response_json = json.loads(response)
except urllib2.HTTPError as err:
if err.code == 404:
print query + ' test pass'
else:
# did not get expected file not found error
failcount += 1
raise
# /core2/save on protected port test
query='http://localhost:' + core1InfoPort + '/core2/save'
opener = urllib2.build_opener(urllib2.HTTPHandler)
request = urllib2.Request(query, '')
#request.add_header('Content-Type', 'your/contenttype')
request.get_method = lambda: 'PUT'
#do the query
#print query
try:
response = opener.open(request).read()
#print response
response_json = json.loads(response)
except urllib2.HTTPError as err:
if err.code == 404:
print query + ' test pass'
else:
# did not get expected file not found error
failcount += 1
raise
# /core2/export on protected port test
query='http://localhost:' + core1InfoPort + '/core2/export?exported_data_file=core2-exported.json'
opener = urllib2.build_opener(urllib2.HTTPHandler)
request = urllib2.Request(query, '')
#request.add_header('Content-Type', 'your/contenttype')
request.get_method = lambda: 'PUT'
#do the query
#print query
try:
response = opener.open(request).read()
#print response
response_json = json.loads(response)
except urllib2.HTTPError as err:
if err.code == 404:
print query + ' test pass'
else:
# did not get expected file not found error
failcount += 1
raise
# /core2/resetLogger on protected port test
query='http://localhost:' + core1InfoPort + '/core2/resetLogger'
opener = urllib2.build_opener(urllib2.HTTPHandler)
request = urllib2.Request(query, '')
#request.add_header('Content-Type', 'your/contenttype')
request.get_method = lambda: 'PUT'
#do the query
#print query
try:
response = opener.open(request).read()
#print response
response_json = json.loads(response)
except urllib2.HTTPError as err:
if err.code == 404:
print query + ' test pass'
else:
# did not get expected file not found error
failcount += 1
raise
print "\nTest suite #4 - Port security"
f_in = open(queriesAndResultsPath, 'r')
for line in f_in:
#get the query keyword and results
value=line.split('||')
queryValue=value[0].split()
allResults=value[1].split('@')
coreNum=0
for coreResult in allResults:
resultValue=coreResult.split()
#construct the query
if coreNum == 0:
# test default core (unnamed core) on 0th iteration
query='http://localhost:' + core1InfoPort + '/search?'
else:
query='http://localhost:' + core1InfoPort + '/core' + str(coreNum) + '/search?'
query = query + prepareQuery(queryValue, False)
try:
#do the query
response = urllib2.urlopen(query).read()
#print query + ' Got ==> ' + response
response_json = json.loads(response)
except urllib2.HTTPError as err:
if err.code == 404:
print query + ' test pass'
else:
# did not get expected file not found error
failCount += 1
raise
coreNum += 1
f_in.close()
f_in = open(queriesAndResultsPath, 'r')
for line in f_in:
#get the query keyword and results
value=line.split('||')
queryValue=value[0].split()
allResults=value[1].split('@')
coreNum=0
for coreResult in allResults:
resultValue=coreResult.split()
#construct the query
if coreNum == 0:
# test default core (unnamed core) on 0th iteration
query='http://localhost:' + core2ControlPort + '/search?'
else:
query='http://localhost:' + core2ControlPort + '/core' + str(coreNum) + '/search?'
query = query + prepareQuery(queryValue, False)
try:
#do the query
response = urllib2.urlopen(query).read()
#print query + ' Got ==> ' + response
response_json = json.loads(response)
except urllib2.HTTPError as err:
if err.code == 404:
print query + ' test pass'
else:
# did not get expected file not found error
failCount += 1
raise
coreNum += 1
f_in.close()
test_lib.killServer(serverHandle)
print '=============================='
return failCount
if __name__ == '__main__':
    # Usage: <script> <binary_path> <queriesAndResultsPath>
    # queriesAndResultsPath is the query file; each line looks like
    # "trust||01c90b4effb2353742080000" i.e. query||record_ids(results)
    binary_path = sys.argv[1]
    queriesAndResultsPath = sys.argv[2]
    exitCode = testMultipleCores(queriesAndResultsPath, binary_path)
    # Exit code is the accumulated failure count returned by the test run;
    # os._exit bypasses interpreter cleanup and reports it directly.
    os._exit(exitCode)
| 34.538745
| 155
| 0.585417
| 2,099
| 18,720
| 5.170081
| 0.128633
| 0.039624
| 0.036491
| 0.031515
| 0.776723
| 0.761242
| 0.731939
| 0.727976
| 0.714983
| 0.689458
| 0
| 0.023164
| 0.298932
| 18,720
| 541
| 156
| 34.602588
| 0.803718
| 0.240011
| 0
| 0.743284
| 0
| 0
| 0.148932
| 0.016492
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.068657
| 0.00597
| null | null | 0.140299
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
3dc64f5de660a9d3a581daea99f1f4384aad2e14
| 65,054
|
py
|
Python
|
appengine/machine_provider/handlers_endpoints_test.py
|
stefb965/luci-py
|
e0a8a5640c4104e5c90781d833168aa8a8d1f24d
|
[
"Apache-2.0"
] | 1
|
2017-10-30T15:08:10.000Z
|
2017-10-30T15:08:10.000Z
|
appengine/machine_provider/handlers_endpoints_test.py
|
stefb965/luci-py
|
e0a8a5640c4104e5c90781d833168aa8a8d1f24d
|
[
"Apache-2.0"
] | null | null | null |
appengine/machine_provider/handlers_endpoints_test.py
|
stefb965/luci-py
|
e0a8a5640c4104e5c90781d833168aa8a8d1f24d
|
[
"Apache-2.0"
] | 1
|
2020-07-05T19:54:40.000Z
|
2020-07-05T19:54:40.000Z
|
#!/usr/bin/env python
# Copyright 2015 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
"""Unit tests for handlers_endpoints.py."""
import datetime
import json
import unittest
import test_env
test_env.setup_test_env()
import endpoints
from google.appengine import runtime
from google.appengine.ext import ndb
from protorpc.remote import protojson
import webtest
from components import auth_testing
from components import utils
from components.machine_provider import rpc_messages
from test_support import test_case
import acl
import handlers_endpoints
import models
def rpc_to_json(rpc_message):
  """Converts the given RPC message to a POSTable JSON dict.

  Args:
    rpc_message: A protorpc.message.Message instance.

  Returns:
    A dict representing the JSON encoding of the message (json.loads of
    the protojson-encoded message), suitable for POSTing as a body.
  """
  return json.loads(protojson.encode_message(rpc_message))
def jsonish_dict_to_rpc(dictionary, rpc_message_type):
  """Decodes a JSON-encodable dict into an RPC message instance.

  Args:
    dictionary: A dict instance containing only values which can be
      encoded as JSON.
    rpc_message_type: A type inheriting from protorpc.message.Message.

  Returns:
    An object of type rpc_message_type.
  """
  # Round-trip through a JSON string so protojson can do the decoding.
  encoded = json.dumps(dictionary)
  return protojson.decode_message(rpc_message_type, encoded)
class CatalogTest(test_case.EndpointsTestCase):
  """Tests for handlers_endpoints.CatalogEndpoints."""
  # Endpoints service under test; consumed by EndpointsTestCase.call_api.
  api_service_cls = handlers_endpoints.CatalogEndpoints

  def setUp(self):
    # Wrap the endpoints app in a webtest client for in-process calls.
    super(CatalogTest, self).setUp()
    app = handlers_endpoints.create_endpoints_app()
    self.app = webtest.TestApp(app)

  def mock_get_current_backend(self, backend=rpc_messages.Backend.DUMMY):
    # Makes acl.get_current_backend return the given backend for any call.
    self.mock(acl, 'get_current_backend', lambda *args, **kwargs: backend)

  def test_get(self):
    # Retrieving an existing machine returns its stored dimensions and
    # lease expiration timestamp.
    models.CatalogMachineEntry(
        key=models.CatalogMachineEntry._generate_key('DUMMY', 'fake-host'),
        dimensions=rpc_messages.Dimensions(hostname='fake-host'),
        lease_expiration_ts=utils.utcnow(),
    ).put()
    request = rpc_to_json(rpc_messages.CatalogMachineRetrievalRequest(
        hostname='fake-host',
    ))
    self.mock_get_current_backend()
    response = jsonish_dict_to_rpc(
        self.call_api('get', request).json,
        rpc_messages.CatalogMachineRetrievalResponse,
    )
    self.assertEqual(response.dimensions.hostname, 'fake-host')
    self.assertTrue(response.lease_expiration_ts)

  def test_get_mismatched_backend(self):
    # Asking for a backend other than the caller's own yields HTTP 403.
    models.CatalogMachineEntry(
        key=models.CatalogMachineEntry._generate_key('DUMMY', 'fake-host'),
        dimensions=rpc_messages.Dimensions(hostname='fake-host'),
    ).put()
    request = rpc_to_json(rpc_messages.CatalogMachineRetrievalRequest(
        backend=rpc_messages.Backend.GCE,
        hostname='fake-host',
    ))
    self.mock_get_current_backend()
    jsonish_dict_to_rpc(
        self.call_api('get', request, status=403).json,
        rpc_messages.CatalogMachineRetrievalResponse,
    )

  def test_get_backend_unspecified_by_admin(self):
    # A catalog admin must name the backend explicitly: HTTP 400 otherwise.
    self.mock(acl, 'is_catalog_admin', lambda *args, **kwargs: True)
    models.CatalogMachineEntry(
        key=models.CatalogMachineEntry._generate_key('DUMMY', 'fake-host'),
        dimensions=rpc_messages.Dimensions(hostname='fake-host'),
    ).put()
    request = rpc_to_json(rpc_messages.CatalogMachineRetrievalRequest(
        hostname='fake-host',
    ))
    jsonish_dict_to_rpc(
        self.call_api('get', request, status=400).json,
        rpc_messages.CatalogMachineRetrievalResponse,
    )

  def test_get_not_found(self):
    # An unknown hostname yields HTTP 404.
    request = rpc_to_json(rpc_messages.CatalogMachineRetrievalRequest(
        hostname='fake-host',
    ))
    self.mock_get_current_backend()
    jsonish_dict_to_rpc(
        self.call_api('get', request, status=404).json,
        rpc_messages.CatalogMachineRetrievalResponse,
    )

  def test_add(self):
    # Adding a well-formed machine succeeds (no error in the response).
    request = rpc_to_json(rpc_messages.CatalogMachineAdditionRequest(
        dimensions=rpc_messages.Dimensions(
            hostname='fake-host',
            os_family=rpc_messages.OSFamily.LINUX,
        ),
        policies=rpc_messages.Policies(
            backend_topic='fake-topic',
        ),
    ))
    self.mock_get_current_backend()
    response = jsonish_dict_to_rpc(
        self.call_api('add_machine', request).json,
        rpc_messages.CatalogManipulationResponse,
    )
    self.assertFalse(response.error)

  def test_mismatched_backend(self):
    # Adding a machine for a different backend reports MISMATCHED_BACKEND.
    request = rpc_to_json(rpc_messages.CatalogMachineAdditionRequest(
        dimensions=rpc_messages.Dimensions(
            backend=rpc_messages.Backend.GCE,
            hostname='fake-host',
            os_family=rpc_messages.OSFamily.LINUX,
        ),
        policies=rpc_messages.Policies(
            backend_topic='fake-topic',
        ),
    ))
    self.mock_get_current_backend()
    response = jsonish_dict_to_rpc(
        self.call_api('add_machine', request).json,
        rpc_messages.CatalogManipulationResponse,
    )
    self.assertEqual(
        response.error,
        rpc_messages.CatalogManipulationRequestError.MISMATCHED_BACKEND,
    )

  def test_add_backend_unspecified_by_admin(self):
    # An admin who omits the backend gets UNSPECIFIED_BACKEND back.
    self.mock(acl, 'is_catalog_admin', lambda *args, **kwargs: True)
    request = rpc_to_json(rpc_messages.CatalogMachineAdditionRequest(
        dimensions=rpc_messages.Dimensions(
            hostname='fake-host',
            os_family=rpc_messages.OSFamily.LINUX,
        ),
        policies=rpc_messages.Policies(
            backend_topic='fake-topic',
        ),
    ))
    response = jsonish_dict_to_rpc(
        self.call_api('add_machine', request).json,
        rpc_messages.CatalogManipulationResponse,
    )
    self.assertEqual(
        response.error,
        rpc_messages.CatalogManipulationRequestError.UNSPECIFIED_BACKEND,
    )

  def test_add_no_hostname(self):
    # A machine without a hostname reports UNSPECIFIED_HOSTNAME.
    request = rpc_to_json(rpc_messages.CatalogMachineAdditionRequest(
        dimensions=rpc_messages.Dimensions(
            os_family=rpc_messages.OSFamily.LINUX,
        ),
        policies=rpc_messages.Policies(
            backend_project='fake-project',
            backend_topic='fake-topic',
        ),
    ))
    self.mock_get_current_backend()
    response = jsonish_dict_to_rpc(
        self.call_api('add_machine', request).json,
        rpc_messages.CatalogManipulationResponse,
    )
    self.assertEqual(
        response.error,
        rpc_messages.CatalogManipulationRequestError.UNSPECIFIED_HOSTNAME,
    )

  def test_add_duplicate(self):
    # Re-adding the same hostname reports HOSTNAME_REUSE on the second call.
    request_1 = rpc_to_json(rpc_messages.CatalogMachineAdditionRequest(
        dimensions=rpc_messages.Dimensions(
            hostname='fake-host',
            os_family=rpc_messages.OSFamily.LINUX,
        ),
        policies=rpc_messages.Policies(
            backend_project='fake-project',
            backend_topic='fake-topic',
        ),
    ))
    request_2 = rpc_to_json(rpc_messages.CatalogMachineAdditionRequest(
        dimensions=rpc_messages.Dimensions(
            hostname='fake-host',
            os_family=rpc_messages.OSFamily.LINUX,
        ),
        policies=rpc_messages.Policies(
            backend_project='fake-project',
            backend_topic='fake-topic',
        ),
    ))
    self.mock_get_current_backend()
    response_1 = jsonish_dict_to_rpc(
        self.call_api('add_machine', request_1).json,
        rpc_messages.CatalogManipulationResponse,
    )
    response_2 = jsonish_dict_to_rpc(
        self.call_api('add_machine', request_2).json,
        rpc_messages.CatalogManipulationResponse,
    )
    self.assertFalse(response_1.error)
    self.assertEqual(
        response_2.error,
        rpc_messages.CatalogManipulationRequestError.HOSTNAME_REUSE,
    )

  def test_add_batch_empty(self):
    # An empty batch yields an empty list of responses.
    request = rpc_to_json(rpc_messages.CatalogMachineBatchAdditionRequest())
    self.mock_get_current_backend()
    response = jsonish_dict_to_rpc(
        self.call_api('add_machines', request).json,
        rpc_messages.CatalogBatchManipulationResponse,
    )
    self.assertFalse(response.responses)

  def test_add_batch(self):
    # Batch addition returns one response per request, in order; the third
    # request reuses fake-host-1 and so reports HOSTNAME_REUSE.
    request = rpc_to_json(rpc_messages.CatalogMachineBatchAdditionRequest(
        requests=[
            rpc_messages.CatalogMachineAdditionRequest(
                dimensions=rpc_messages.Dimensions(
                    hostname='fake-host-1',
                    os_family=rpc_messages.OSFamily.LINUX,
                ),
                policies=rpc_messages.Policies(
                    backend_project='fake-project',
                    backend_topic='fake-topic',
                ),
            ),
            rpc_messages.CatalogMachineAdditionRequest(
                dimensions=rpc_messages.Dimensions(
                    hostname='fake-host-2',
                    os_family=rpc_messages.OSFamily.WINDOWS,
                ),
                policies=rpc_messages.Policies(
                    backend_project='fake-project',
                    backend_topic='fake-topic',
                ),
            ),
            rpc_messages.CatalogMachineAdditionRequest(
                dimensions=rpc_messages.Dimensions(
                    hostname='fake-host-1',
                    os_family=rpc_messages.OSFamily.OSX,
                ),
                policies=rpc_messages.Policies(
                    backend_project='fake-project',
                    backend_topic='fake-topic',
                ),
            ),
        ],
    ))
    self.mock_get_current_backend()
    response = jsonish_dict_to_rpc(
        self.call_api('add_machines', request).json,
        rpc_messages.CatalogBatchManipulationResponse,
    )
    self.assertEqual(len(response.responses), 3)
    self.assertFalse(response.responses[0].error)
    self.assertFalse(response.responses[1].error)
    self.assertEqual(
        response.responses[2].error,
        rpc_messages.CatalogManipulationRequestError.HOSTNAME_REUSE,
    )

  def test_add_batch_error(self):
    # A per-request error (mismatched backend) is surfaced in the
    # corresponding batch response entry.
    request = rpc_to_json(rpc_messages.CatalogMachineBatchAdditionRequest(
        requests=[
            rpc_messages.CatalogMachineAdditionRequest(
                dimensions=rpc_messages.Dimensions(
                    backend=rpc_messages.Backend.GCE,
                    hostname='fake-host-1',
                    os_family=rpc_messages.OSFamily.LINUX,
                ),
                policies=rpc_messages.Policies(
                    backend_project='fake-project',
                    backend_topic='fake-topic',
                ),
            ),
        ],
    ))
    self.mock_get_current_backend()
    response = jsonish_dict_to_rpc(
        self.call_api('add_machines', request).json,
        rpc_messages.CatalogBatchManipulationResponse,
    )
    self.assertEqual(len(response.responses), 1)
    self.assertEqual(
        response.responses[0].error,
        rpc_messages.CatalogManipulationRequestError.MISMATCHED_BACKEND,
    )

  def test_delete(self):
    # Add, delete, then re-add the same hostname: all three succeed,
    # i.e. deletion frees the hostname for reuse.
    request_1 = rpc_to_json(rpc_messages.CatalogMachineAdditionRequest(
        dimensions=rpc_messages.Dimensions(
            hostname='fake-host',
            os_family=rpc_messages.OSFamily.LINUX,
        ),
        policies=rpc_messages.Policies(
            backend_project='fake-project',
            backend_topic='fake-topic',
        ),
    ))
    request_2 = rpc_to_json(rpc_messages.CatalogMachineDeletionRequest(
        dimensions=rpc_messages.Dimensions(
            hostname='fake-host',
            os_family=rpc_messages.OSFamily.LINUX,
        ),
    ))
    request_3 = rpc_to_json(rpc_messages.CatalogMachineAdditionRequest(
        dimensions=rpc_messages.Dimensions(
            hostname='fake-host',
            os_family=rpc_messages.OSFamily.WINDOWS,
        ),
        policies=rpc_messages.Policies(
            backend_project='fake-project',
            backend_topic='fake-topic',
        ),
    ))
    self.mock_get_current_backend()
    response_1 = jsonish_dict_to_rpc(
        self.call_api('add_machine', request_1).json,
        rpc_messages.CatalogManipulationResponse,
    )
    response_2 = jsonish_dict_to_rpc(
        self.call_api('delete_machine', request_2).json,
        rpc_messages.CatalogManipulationResponse,
    )
    response_3 = jsonish_dict_to_rpc(
        self.call_api('add_machine', request_3).json,
        rpc_messages.CatalogManipulationResponse,
    )
    self.assertFalse(response_1.error)
    self.assertFalse(response_2.error)
    self.assertFalse(response_3.error)

  def test_delete_error(self):
    # Deleting a machine belonging to another backend reports
    # MISMATCHED_BACKEND.
    request = rpc_to_json(rpc_messages.CatalogMachineAdditionRequest(
        dimensions=rpc_messages.Dimensions(
            backend=rpc_messages.Backend.GCE,
            hostname='fake-host',
            os_family=rpc_messages.OSFamily.LINUX,
        ),
        policies=rpc_messages.Policies(
            backend_project='fake-project',
            backend_topic='fake-topic',
        ),
    ))
    self.mock_get_current_backend()
    response = jsonish_dict_to_rpc(
        self.call_api('delete_machine', request).json,
        rpc_messages.CatalogManipulationResponse,
    )
    self.assertEqual(
        response.error,
        rpc_messages.CatalogManipulationRequestError.MISMATCHED_BACKEND,
    )

  def test_delete_leased(self):
    # A machine with an active lease cannot be deleted: the call reports
    # LEASED and the entry remains in the datastore.
    request = rpc_messages.CatalogMachineAdditionRequest(
        dimensions=rpc_messages.Dimensions(
            backend=rpc_messages.Backend.DUMMY,
            hostname='fake-host',
            os_family=rpc_messages.OSFamily.LINUX,
        ),
        policies=rpc_messages.Policies(
            backend_project='fake-project',
            backend_topic='fake-topic',
        ),
    )
    key = models.CatalogMachineEntry(
        key=models.CatalogMachineEntry.generate_key(request.dimensions),
        dimensions=request.dimensions,
        lease_id='lease-id',
    ).put()
    request = rpc_to_json(request)
    self.mock_get_current_backend()
    response = jsonish_dict_to_rpc(
        self.call_api('delete_machine', request).json,
        rpc_messages.CatalogManipulationResponse,
    )
    self.assertEqual(
        response.error,
        rpc_messages.CatalogManipulationRequestError.LEASED,
    )
    self.assertTrue(key.get())

  def test_delete_invalid(self):
    # Deleting a non-existent entry reports ENTRY_NOT_FOUND and leaves the
    # existing entry intact (re-adding it still reports HOSTNAME_REUSE).
    request_1 = rpc_to_json(rpc_messages.CatalogMachineAdditionRequest(
        dimensions=rpc_messages.Dimensions(
            hostname='fake-host-1',
            os_family=rpc_messages.OSFamily.LINUX,
        ),
        policies=rpc_messages.Policies(
            backend_project='fake-project',
            backend_topic='fake-topic',
        ),
    ))
    request_2 = rpc_to_json(rpc_messages.CatalogMachineDeletionRequest(
        dimensions=rpc_messages.Dimensions(
            hostname='fake-host-2',
            os_family=rpc_messages.OSFamily.LINUX,
        ),
    ))
    request_3 = rpc_to_json(rpc_messages.CatalogMachineAdditionRequest(
        dimensions=rpc_messages.Dimensions(
            hostname='fake-host-1',
            os_family=rpc_messages.OSFamily.LINUX,
        ),
        policies=rpc_messages.Policies(
            backend_project='fake-project',
            backend_topic='fake-topic',
        ),
    ))
    self.mock_get_current_backend()
    response_1 = jsonish_dict_to_rpc(
        self.call_api('add_machine', request_1).json,
        rpc_messages.CatalogManipulationResponse,
    )
    response_2 = jsonish_dict_to_rpc(
        self.call_api('delete_machine', request_2).json,
        rpc_messages.CatalogManipulationResponse,
    )
    response_3 = jsonish_dict_to_rpc(
        self.call_api('add_machine', request_3).json,
        rpc_messages.CatalogManipulationResponse,
    )
    self.assertFalse(response_1.error)
    self.assertEqual(
        response_2.error,
        rpc_messages.CatalogManipulationRequestError.ENTRY_NOT_FOUND,
    )
    self.assertEqual(
        response_3.error,
        rpc_messages.CatalogManipulationRequestError.HOSTNAME_REUSE,
    )
class MachineTest(test_case.EndpointsTestCase):
"""Tests for handlers_endpoints.MachineEndpoints."""
api_service_cls = handlers_endpoints.MachineEndpoints
def setUp(self):
super(MachineTest, self).setUp()
app = handlers_endpoints.create_endpoints_app()
self.app = webtest.TestApp(app)
def test_update_instruction_state_not_found(self):
machine_key = ndb.Key(models.CatalogMachineEntry, 'fake-machine')
with self.assertRaises(endpoints.NotFoundException):
handlers_endpoints.MachineEndpoints._update_instruction_state(
machine_key, models.InstructionStates.EXECUTED)
self.failIf(machine_key.get())
def test_update_instruction_state_no_instruction(self):
machine_key = models.CatalogMachineEntry(
dimensions=rpc_messages.Dimensions(
backend=rpc_messages.Backend.DUMMY,
),
).put()
handlers_endpoints.MachineEndpoints._update_instruction_state(
machine_key, models.InstructionStates.EXECUTED)
self.failIf(machine_key.get().instruction)
def test_update_instruction_state_already_updated(self):
machine_key = models.CatalogMachineEntry(
dimensions=rpc_messages.Dimensions(
backend=rpc_messages.Backend.DUMMY,
),
instruction=models.Instruction(
state=models.InstructionStates.EXECUTED
),
).put()
handlers_endpoints.MachineEndpoints._update_instruction_state(
machine_key, models.InstructionStates.EXECUTED)
self.assertEqual(
machine_key.get().instruction.state, models.InstructionStates.EXECUTED)
def test_update_instruction_state_invalid_new_state(self):
machine_key = models.CatalogMachineEntry(
dimensions=rpc_messages.Dimensions(
backend=rpc_messages.Backend.DUMMY,
),
instruction=models.Instruction(
state=models.InstructionStates.EXECUTED
),
).put()
handlers_endpoints.MachineEndpoints._update_instruction_state(
machine_key, models.InstructionStates.PENDING)
self.assertEqual(
machine_key.get().instruction.state, models.InstructionStates.EXECUTED)
def test_update_instruction_state_invalid_transition(self):
machine_key = models.CatalogMachineEntry(
dimensions=rpc_messages.Dimensions(
backend=rpc_messages.Backend.DUMMY,
),
instruction=models.Instruction(
state=models.InstructionStates.EXECUTED
),
).put()
handlers_endpoints.MachineEndpoints._update_instruction_state(
machine_key, models.InstructionStates.RECEIVED)
self.assertEqual(
machine_key.get().instruction.state, models.InstructionStates.EXECUTED)
def test_update_instruction_state(self):
machine_key = models.CatalogMachineEntry(
dimensions=rpc_messages.Dimensions(
backend=rpc_messages.Backend.DUMMY,
),
instruction=models.Instruction(
state=models.InstructionStates.PENDING
),
).put()
handlers_endpoints.MachineEndpoints._update_instruction_state(
machine_key, models.InstructionStates.RECEIVED)
self.assertEqual(
machine_key.get().instruction.state, models.InstructionStates.RECEIVED)
def test_poll_anonymous(self):
request = rpc_to_json(rpc_messages.PollRequest(
backend=rpc_messages.Backend.DUMMY,
hostname='fake-host',
))
dimensions = rpc_messages.Dimensions(
backend=rpc_messages.Backend.DUMMY,
hostname='fake-host',
)
machine_key = models.CatalogMachineEntry(
key=models.CatalogMachineEntry.generate_key(dimensions),
dimensions=dimensions,
instruction=models.Instruction(
instruction=rpc_messages.Instruction(swarming_server='example.com'),
state=models.InstructionStates.PENDING,
),
lease_expiration_ts=utils.utcnow() + datetime.timedelta(hours=24),
lease_id='fake-id',
policies=rpc_messages.Policies(
machine_service_account=auth_testing.DEFAULT_MOCKED_IDENTITY.name,
),
).put()
models.LeaseRequest(
id='fake-id',
deduplication_checksum='checksum',
owner=auth_testing.DEFAULT_MOCKED_IDENTITY,
request=rpc_messages.LeaseRequest(
dimensions=rpc_messages.Dimensions(),
request_id='request-id',
),
).put()
with self.assertRaises(webtest.app.AppError):
response = jsonish_dict_to_rpc(
self.call_api('poll', request).json,
rpc_messages.PollResponse,
)
def test_poll_backend_omitted(self):
auth_testing.mock_get_current_identity(self)
request = rpc_to_json(rpc_messages.PollRequest(
hostname='fake-host',
))
dimensions = rpc_messages.Dimensions(
backend=rpc_messages.Backend.DUMMY,
hostname='fake-host',
)
machine_key = models.CatalogMachineEntry(
key=models.CatalogMachineEntry.generate_key(dimensions),
dimensions=dimensions,
instruction=models.Instruction(
instruction=rpc_messages.Instruction(swarming_server='example.com'),
state=models.InstructionStates.PENDING,
),
lease_expiration_ts=utils.utcnow() + datetime.timedelta(hours=24),
lease_id='fake-id',
policies=rpc_messages.Policies(
machine_service_account=auth_testing.DEFAULT_MOCKED_IDENTITY.name,
),
).put()
models.LeaseRequest(
id='fake-id',
deduplication_checksum='checksum',
owner=auth_testing.DEFAULT_MOCKED_IDENTITY,
request=rpc_messages.LeaseRequest(
dimensions=rpc_messages.Dimensions(),
request_id='request-id',
),
).put()
with self.assertRaises(webtest.app.AppError):
response = jsonish_dict_to_rpc(
self.call_api('poll', request).json,
rpc_messages.PollResponse,
)
def test_poll_entry_not_found(self):
auth_testing.mock_get_current_identity(self)
request = rpc_to_json(rpc_messages.PollRequest(
backend=rpc_messages.Backend.DUMMY,
hostname='fake-host',
))
models.LeaseRequest(
id='fake-id',
deduplication_checksum='checksum',
owner=auth_testing.DEFAULT_MOCKED_IDENTITY,
request=rpc_messages.LeaseRequest(
dimensions=rpc_messages.Dimensions(),
request_id='request-id',
),
).put()
with self.assertRaises(webtest.app.AppError):
response = jsonish_dict_to_rpc(
self.call_api('poll', request).json,
rpc_messages.PollResponse,
)
def test_poll_unauthorized(self):
auth_testing.mock_get_current_identity(self)
request = rpc_to_json(rpc_messages.PollRequest(
backend=rpc_messages.Backend.DUMMY,
hostname='fake-host',
))
dimensions = rpc_messages.Dimensions(
backend=rpc_messages.Backend.DUMMY,
hostname='fake-host',
)
machine_key = models.CatalogMachineEntry(
key=models.CatalogMachineEntry.generate_key(dimensions),
dimensions=dimensions,
instruction=models.Instruction(
instruction=rpc_messages.Instruction(swarming_server='example.com'),
state=models.InstructionStates.PENDING,
),
lease_expiration_ts=utils.utcnow() + datetime.timedelta(hours=24),
lease_id='fake-id',
).put()
models.LeaseRequest(
id='fake-id',
deduplication_checksum='checksum',
owner=auth_testing.DEFAULT_MOCKED_IDENTITY,
request=rpc_messages.LeaseRequest(
dimensions=rpc_messages.Dimensions(),
request_id='request-id',
),
).put()
with self.assertRaises(webtest.app.AppError):
response = jsonish_dict_to_rpc(
self.call_api('poll', request).json,
rpc_messages.PollResponse,
)
def test_poll_not_leased(self):
auth_testing.mock_get_current_identity(self)
request = rpc_to_json(rpc_messages.PollRequest(
backend=rpc_messages.Backend.DUMMY,
hostname='fake-host',
))
dimensions = rpc_messages.Dimensions(
backend=rpc_messages.Backend.DUMMY,
hostname='fake-host',
)
machine_key = models.CatalogMachineEntry(
key=models.CatalogMachineEntry.generate_key(dimensions),
dimensions=dimensions,
instruction=models.Instruction(
instruction=rpc_messages.Instruction(swarming_server='example.com'),
state=models.InstructionStates.PENDING,
),
policies=rpc_messages.Policies(
machine_service_account=auth_testing.DEFAULT_MOCKED_IDENTITY.name,
),
).put()
models.LeaseRequest(
id='fake-id',
deduplication_checksum='checksum',
owner=auth_testing.DEFAULT_MOCKED_IDENTITY,
request=rpc_messages.LeaseRequest(
dimensions=rpc_messages.Dimensions(),
request_id='request-id',
),
).put()
response = jsonish_dict_to_rpc(
self.call_api('poll', request).json,
rpc_messages.PollResponse,
)
self.failIf(response.instruction)
self.assertEqual(
machine_key.get().instruction.state, models.InstructionStates.PENDING)
def test_poll_no_instruction(self):
auth_testing.mock_get_current_identity(self)
request = rpc_to_json(rpc_messages.PollRequest(
backend=rpc_messages.Backend.DUMMY,
hostname='fake-host',
))
dimensions = rpc_messages.Dimensions(
backend=rpc_messages.Backend.DUMMY,
hostname='fake-host',
)
machine_key = models.CatalogMachineEntry(
key=models.CatalogMachineEntry.generate_key(dimensions),
dimensions=dimensions,
lease_expiration_ts=utils.utcnow() + datetime.timedelta(hours=24),
lease_id='fake-id',
policies=rpc_messages.Policies(
machine_service_account=auth_testing.DEFAULT_MOCKED_IDENTITY.name,
),
).put()
models.LeaseRequest(
id='fake-id',
deduplication_checksum='checksum',
owner=auth_testing.DEFAULT_MOCKED_IDENTITY,
request=rpc_messages.LeaseRequest(
dimensions=rpc_messages.Dimensions(),
request_id='request-id',
),
).put()
response = jsonish_dict_to_rpc(
self.call_api('poll', request).json,
rpc_messages.PollResponse,
)
self.failIf(response.instruction)
self.failIf(machine_key.get().instruction)
def test_poll_expired(self):
auth_testing.mock_get_current_identity(self)
request = rpc_to_json(rpc_messages.PollRequest(
backend=rpc_messages.Backend.DUMMY,
hostname='fake-host',
))
dimensions = rpc_messages.Dimensions(
backend=rpc_messages.Backend.DUMMY,
hostname='fake-host',
)
machine_key = models.CatalogMachineEntry(
key=models.CatalogMachineEntry.generate_key(dimensions),
dimensions=dimensions,
instruction=models.Instruction(
instruction=rpc_messages.Instruction(swarming_server='example.com'),
state=models.InstructionStates.PENDING,
),
lease_expiration_ts=utils.utcnow(),
lease_id='fake-id',
policies=rpc_messages.Policies(
machine_service_account=auth_testing.DEFAULT_MOCKED_IDENTITY.name,
),
).put()
models.LeaseRequest(
id='fake-id',
deduplication_checksum='checksum',
owner=auth_testing.DEFAULT_MOCKED_IDENTITY,
request=rpc_messages.LeaseRequest(
dimensions=rpc_messages.Dimensions(),
request_id='request-id',
),
).put()
response = jsonish_dict_to_rpc(
self.call_api('poll', request).json,
rpc_messages.PollResponse,
)
self.failIf(response.instruction)
self.assertEqual(
machine_key.get().instruction.state, models.InstructionStates.PENDING)
def test_poll_no_lease(self):
auth_testing.mock_get_current_identity(self)
request = rpc_to_json(rpc_messages.PollRequest(
backend=rpc_messages.Backend.DUMMY,
hostname='fake-host',
))
dimensions = rpc_messages.Dimensions(
backend=rpc_messages.Backend.DUMMY,
hostname='fake-host',
)
machine_key = models.CatalogMachineEntry(
key=models.CatalogMachineEntry.generate_key(dimensions),
dimensions=dimensions,
instruction=models.Instruction(
instruction=rpc_messages.Instruction(swarming_server='example.com'),
state=models.InstructionStates.PENDING,
),
lease_expiration_ts=utils.utcnow() + datetime.timedelta(hours=24),
lease_id='fake-id',
policies=rpc_messages.Policies(
machine_service_account=auth_testing.DEFAULT_MOCKED_IDENTITY.name,
),
).put()
response = jsonish_dict_to_rpc(
self.call_api('poll', request).json,
rpc_messages.PollResponse,
)
self.failIf(response.instruction)
self.assertEqual(
machine_key.get().instruction.state, models.InstructionStates.PENDING)
def test_poll_no_lease_released(self):
auth_testing.mock_get_current_identity(self)
request = rpc_to_json(rpc_messages.PollRequest(
backend=rpc_messages.Backend.DUMMY,
hostname='fake-host',
))
dimensions = rpc_messages.Dimensions(
backend=rpc_messages.Backend.DUMMY,
hostname='fake-host',
)
machine_key = models.CatalogMachineEntry(
key=models.CatalogMachineEntry.generate_key(dimensions),
dimensions=dimensions,
instruction=models.Instruction(
instruction=rpc_messages.Instruction(swarming_server='example.com'),
state=models.InstructionStates.PENDING,
),
lease_expiration_ts=utils.utcnow() + datetime.timedelta(hours=24),
lease_id='fake-id',
policies=rpc_messages.Policies(
machine_service_account=auth_testing.DEFAULT_MOCKED_IDENTITY.name,
),
).put()
models.LeaseRequest(
id='fake-id',
deduplication_checksum='checksum',
owner=auth_testing.DEFAULT_MOCKED_IDENTITY,
released=True,
request=rpc_messages.LeaseRequest(
dimensions=rpc_messages.Dimensions(),
request_id='request-id',
),
).put()
response = jsonish_dict_to_rpc(
self.call_api('poll', request).json,
rpc_messages.PollResponse,
)
self.failIf(response.instruction)
self.assertEqual(
machine_key.get().instruction.state, models.InstructionStates.PENDING)
def test_poll_implied_backend(self):
def is_group_member(group):
return group == 'machine-provider-dummy-backend'
self.mock(acl.auth, 'is_group_member', is_group_member)
auth_testing.mock_get_current_identity(self)
request = rpc_to_json(rpc_messages.PollRequest(
hostname='fake-host',
))
dimensions = rpc_messages.Dimensions(
backend=rpc_messages.Backend.DUMMY,
hostname='fake-host',
)
machine_key = models.CatalogMachineEntry(
key=models.CatalogMachineEntry.generate_key(dimensions),
dimensions=dimensions,
instruction=models.Instruction(
instruction=rpc_messages.Instruction(swarming_server='example.com'),
state=models.InstructionStates.PENDING,
),
lease_expiration_ts=utils.utcnow() + datetime.timedelta(hours=24),
lease_id='fake-id',
).put()
models.LeaseRequest(
id='fake-id',
deduplication_checksum='checksum',
owner=auth_testing.DEFAULT_MOCKED_IDENTITY,
request=rpc_messages.LeaseRequest(
dimensions=rpc_messages.Dimensions(),
request_id='request-id',
),
).put()
response = jsonish_dict_to_rpc(
self.call_api('poll', request).json,
rpc_messages.PollResponse,
)
self.assertEqual(response.instruction.swarming_server, 'example.com')
self.assertEqual(response.state, models.InstructionStates.PENDING)
self.assertEqual(
machine_key.get().instruction.state, models.InstructionStates.PENDING)
  def test_poll(self):
    """An authorized machine's poll returns its instruction and marks it
    RECEIVED, while the response itself still reports PENDING."""
    auth_testing.mock_get_current_identity(self)
    request = rpc_to_json(rpc_messages.PollRequest(
        backend=rpc_messages.Backend.DUMMY,
        hostname='fake-host',
    ))
    dimensions = rpc_messages.Dimensions(
        backend=rpc_messages.Backend.DUMMY,
        hostname='fake-host',
    )
    # Catalog entry with a PENDING instruction whose policies authorize the
    # mocked identity via machine_service_account.
    machine_key = models.CatalogMachineEntry(
        key=models.CatalogMachineEntry.generate_key(dimensions),
        dimensions=dimensions,
        instruction=models.Instruction(
            instruction=rpc_messages.Instruction(swarming_server='example.com'),
            state=models.InstructionStates.PENDING,
        ),
        lease_expiration_ts=utils.utcnow() + datetime.timedelta(hours=24),
        lease_id='fake-id',
        policies=rpc_messages.Policies(
            machine_service_account=auth_testing.DEFAULT_MOCKED_IDENTITY.name,
        ),
    ).put()
    # The lease request the machine's lease_id points at.
    models.LeaseRequest(
        id='fake-id',
        deduplication_checksum='checksum',
        owner=auth_testing.DEFAULT_MOCKED_IDENTITY,
        request=rpc_messages.LeaseRequest(
            dimensions=rpc_messages.Dimensions(),
            request_id='request-id',
        ),
    ).put()
    response = jsonish_dict_to_rpc(
        self.call_api('poll', request).json,
        rpc_messages.PollResponse,
    )
    self.assertEqual(response.instruction.swarming_server, 'example.com')
    self.assertEqual(response.state, models.InstructionStates.PENDING)
    # Side effect of the poll: the stored instruction advances to RECEIVED.
    self.assertEqual(
        machine_key.get().instruction.state, models.InstructionStates.RECEIVED)
def test_ack_entry_not_found(self):
auth_testing.mock_get_current_identity(self)
request = rpc_to_json(rpc_messages.AckRequest(
backend=rpc_messages.Backend.DUMMY,
hostname='fake-host',
))
with self.assertRaises(webtest.app.AppError):
self.call_api('ack', request)
def test_ack_unauthorized(self):
auth_testing.mock_get_current_identity(self)
request = rpc_to_json(rpc_messages.AckRequest(
backend=rpc_messages.Backend.DUMMY,
hostname='fake-host',
))
dimensions = rpc_messages.Dimensions(
backend=rpc_messages.Backend.DUMMY,
hostname='fake-host',
)
machine_key = models.CatalogMachineEntry(
key=models.CatalogMachineEntry.generate_key(dimensions),
dimensions=dimensions,
instruction=models.Instruction(
instruction=rpc_messages.Instruction(swarming_server='example.com'),
state=models.InstructionStates.RECEIVED,
),
lease_id='fake-id',
policies=rpc_messages.Policies(),
).put()
with self.assertRaises(webtest.app.AppError):
self.call_api('ack', request)
def test_ack_no_instruction(self):
auth_testing.mock_get_current_identity(self)
request = rpc_to_json(rpc_messages.AckRequest(
backend=rpc_messages.Backend.DUMMY,
hostname='fake-host',
))
dimensions = rpc_messages.Dimensions(
backend=rpc_messages.Backend.DUMMY,
hostname='fake-host',
)
machine_key = models.CatalogMachineEntry(
key=models.CatalogMachineEntry.generate_key(dimensions),
dimensions=dimensions,
lease_id='fake-id',
policies=rpc_messages.Policies(
machine_service_account=auth_testing.DEFAULT_MOCKED_IDENTITY.name,
),
).put()
with self.assertRaises(webtest.app.AppError):
self.call_api('ack', request)
def test_ack(self):
auth_testing.mock_get_current_identity(self)
request = rpc_to_json(rpc_messages.AckRequest(
backend=rpc_messages.Backend.DUMMY,
hostname='fake-host',
))
dimensions = rpc_messages.Dimensions(
backend=rpc_messages.Backend.DUMMY,
hostname='fake-host',
)
machine_key = models.CatalogMachineEntry(
key=models.CatalogMachineEntry.generate_key(dimensions),
dimensions=dimensions,
instruction=models.Instruction(
instruction=rpc_messages.Instruction(swarming_server='example.com'),
state=models.InstructionStates.RECEIVED,
),
lease_id='fake-id',
policies=rpc_messages.Policies(
machine_service_account=auth_testing.DEFAULT_MOCKED_IDENTITY.name,
),
).put()
self.call_api('ack', request)
self.assertEqual(
machine_key.get().instruction.state, models.InstructionStates.EXECUTED)
class MachineProviderReleaseTest(test_case.EndpointsTestCase):
  """Tests for handlers_endpoints.MachineProviderEndpoints.release."""
  api_service_cls = handlers_endpoints.MachineProviderEndpoints

  def setUp(self):
    super(MachineProviderReleaseTest, self).setUp()
    self.app = webtest.TestApp(handlers_endpoints.create_endpoints_app())

  def test_release(self):
    """An authorized release call echoes the request id with no error."""
    self.mock(
        acl.auth,
        'is_group_member',
        lambda group: group == 'machine-provider-users',
    )
    # Stub the release implementation; only the endpoint plumbing is tested.
    self.mock(
        handlers_endpoints.MachineProviderEndpoints,
        '_release',
        lambda *args, **kwargs: None,
    )
    release_request = rpc_to_json(rpc_messages.LeaseReleaseRequest(
        request_id='request-id',
    ))
    release_response = jsonish_dict_to_rpc(
        self.call_api('release', release_request).json,
        rpc_messages.LeaseReleaseResponse,
    )
    self.assertEqual(release_response.client_request_id, 'request-id')
    self.assertFalse(release_response.error)
class MachineProviderBatchedReleaseTest(test_case.EndpointsTestCase):
  """Tests for handlers_endpoints.MachineProviderEndpoints.batched_release."""
  api_service_cls = handlers_endpoints.MachineProviderEndpoints
  def setUp(self):
    super(MachineProviderBatchedReleaseTest, self).setUp()
    app = handlers_endpoints.create_endpoints_app()
    self.app = webtest.TestApp(app)
  def test_batch(self):
    """Releasing an unknown lease in a batch reports NOT_FOUND per item."""
    def is_group_member(group):
      return group == 'machine-provider-users'
    self.mock(acl.auth, 'is_group_member', is_group_member)
    # Freeze utils.utcnow so no time elapses while the batch is processed.
    ts = utils.utcnow()
    self.mock(utils, 'utcnow', lambda *args, **kwargs: ts)
    release_requests = rpc_to_json(rpc_messages.BatchedLeaseReleaseRequest(
        requests=[
            rpc_messages.LeaseReleaseRequest(
                request_id='request-id',
            ),
        ],
    ))
    release_responses = jsonish_dict_to_rpc(
        self.call_api('batched_release', release_requests).json,
        rpc_messages.BatchedLeaseReleaseResponse,
    )
    self.assertEqual(len(release_responses.responses), 1)
    self.assertEqual(
        release_responses.responses[0].client_request_id, 'request-id')
    self.assertEqual(
        release_responses.responses[0].error,
        rpc_messages.LeaseReleaseRequestError.NOT_FOUND,
    )
  def test_deadline_exceeded(self):
    """A clock that jumps 60s per read causes DEADLINE_EXCEEDED per item."""
    def is_group_member(group):
      return group == 'machine-provider-users'
    self.mock(acl.auth, 'is_group_member', is_group_member)
    class utcnow(object):
      # Callable replacement for utils.utcnow: advances 60s on every call.
      def __init__(self, init_ts):
        self.last_ts = init_ts
      def __call__(self, *args, **kwargs):
        self.last_ts = self.last_ts + datetime.timedelta(seconds=60)
        return self.last_ts
    self.mock(utils, 'utcnow', utcnow(utils.utcnow()))
    release_requests = rpc_to_json(rpc_messages.BatchedLeaseReleaseRequest(
        requests=[
            rpc_messages.LeaseReleaseRequest(
                request_id='request-id',
            ),
        ],
    ))
    release_responses = jsonish_dict_to_rpc(
        self.call_api('batched_release', release_requests).json,
        rpc_messages.BatchedLeaseReleaseResponse,
    )
    self.assertEqual(len(release_responses.responses), 1)
    self.assertEqual(
        release_responses.responses[0].client_request_id, 'request-id')
    self.assertEqual(
        release_responses.responses[0].error,
        rpc_messages.LeaseReleaseRequestError.DEADLINE_EXCEEDED,
    )
  def test_exception(self):
    """A CancelledError raised by _release maps to TRANSIENT_ERROR."""
    def is_group_member(group):
      return group == 'machine-provider-users'
    self.mock(acl.auth, 'is_group_member', is_group_member)
    # Freeze utils.utcnow so the deadline path is not triggered instead.
    ts = utils.utcnow()
    self.mock(utils, 'utcnow', lambda *args, **kwargs: ts)
    def _release(*args, **kwargs):
      raise runtime.apiproxy_errors.CancelledError
    self.mock(handlers_endpoints.MachineProviderEndpoints, '_release', _release)
    release_requests = rpc_to_json(rpc_messages.BatchedLeaseReleaseRequest(
        requests=[
            rpc_messages.LeaseReleaseRequest(
                request_id='request-id',
            ),
        ],
    ))
    release_responses = jsonish_dict_to_rpc(
        self.call_api('batched_release', release_requests).json,
        rpc_messages.BatchedLeaseReleaseResponse,
    )
    self.assertEqual(len(release_responses.responses), 1)
    self.assertEqual(
        release_responses.responses[0].client_request_id, 'request-id')
    self.assertEqual(
        release_responses.responses[0].error,
        rpc_messages.LeaseReleaseRequestError.TRANSIENT_ERROR,
    )
class MachineProviderBatchedLeaseTest(test_case.EndpointsTestCase):
  """Tests for handlers_endpoints.MachineProviderEndpoints.batched_lease."""
  api_service_cls = handlers_endpoints.MachineProviderEndpoints
  def setUp(self):
    super(MachineProviderBatchedLeaseTest, self).setUp()
    app = handlers_endpoints.create_endpoints_app()
    self.app = webtest.TestApp(app)
  def test_batch(self):
    """A single valid lease request in a batch is accepted without error."""
    def is_group_member(group):
      return group == 'machine-provider-users'
    self.mock(acl.auth, 'is_group_member', is_group_member)
    # Freeze utils.utcnow so no time elapses while the batch is processed.
    ts = utils.utcnow()
    self.mock(utils, 'utcnow', lambda *args, **kwargs: ts)
    lease_requests = rpc_to_json(rpc_messages.BatchedLeaseRequest(requests=[
        rpc_messages.LeaseRequest(
            dimensions=rpc_messages.Dimensions(
                os_family=rpc_messages.OSFamily.LINUX,
            ),
            duration=1,
            request_id='request-id',
        ),
    ]))
    lease_responses = jsonish_dict_to_rpc(
        self.call_api('batched_lease', lease_requests).json,
        rpc_messages.BatchedLeaseResponse,
    )
    self.assertEqual(len(lease_responses.responses), 1)
    self.assertEqual(
        lease_responses.responses[0].client_request_id, 'request-id')
    self.assertFalse(lease_responses.responses[0].error)
  def test_deadline_exceeded(self):
    """A clock that jumps 60s per read causes DEADLINE_EXCEEDED per item."""
    def is_group_member(group):
      return group == 'machine-provider-users'
    self.mock(acl.auth, 'is_group_member', is_group_member)
    class utcnow(object):
      # Callable replacement for utils.utcnow: advances 60s on every call.
      def __init__(self, init_ts):
        self.last_ts = init_ts
      def __call__(self, *args, **kwargs):
        self.last_ts = self.last_ts + datetime.timedelta(seconds=60)
        return self.last_ts
    self.mock(utils, 'utcnow', utcnow(utils.utcnow()))
    lease_requests = rpc_to_json(rpc_messages.BatchedLeaseRequest(requests=[
        rpc_messages.LeaseRequest(
            dimensions=rpc_messages.Dimensions(
                os_family=rpc_messages.OSFamily.LINUX,
            ),
            duration=1,
            request_id='request-id',
        ),
    ]))
    lease_responses = jsonish_dict_to_rpc(
        self.call_api('batched_lease', lease_requests).json,
        rpc_messages.BatchedLeaseResponse,
    )
    self.assertEqual(len(lease_responses.responses), 1)
    self.assertEqual(
        lease_responses.responses[0].client_request_id, 'request-id')
    self.assertEqual(
        lease_responses.responses[0].error,
        rpc_messages.LeaseRequestError.DEADLINE_EXCEEDED,
    )
  def test_exception(self):
    """A CancelledError raised by _lease maps to TRANSIENT_ERROR."""
    def is_group_member(group):
      return group == 'machine-provider-users'
    self.mock(acl.auth, 'is_group_member', is_group_member)
    # Freeze utils.utcnow so the deadline path is not triggered instead.
    ts = utils.utcnow()
    self.mock(utils, 'utcnow', lambda *args, **kwargs: ts)
    def _lease(*args, **kwargs):
      raise runtime.apiproxy_errors.CancelledError
    self.mock(handlers_endpoints.MachineProviderEndpoints, '_lease', _lease)
    lease_requests = rpc_to_json(rpc_messages.BatchedLeaseRequest(requests=[
        rpc_messages.LeaseRequest(
            dimensions=rpc_messages.Dimensions(
                os_family=rpc_messages.OSFamily.LINUX,
            ),
            duration=1,
            request_id='request-id',
        ),
    ]))
    lease_responses = jsonish_dict_to_rpc(
        self.call_api('batched_lease', lease_requests).json,
        rpc_messages.BatchedLeaseResponse,
    )
    self.assertEqual(len(lease_responses.responses), 1)
    self.assertEqual(
        lease_responses.responses[0].client_request_id, 'request-id')
    self.assertEqual(
        lease_responses.responses[0].error,
        rpc_messages.LeaseRequestError.TRANSIENT_ERROR,
    )
class MachineProviderLeaseTest(test_case.EndpointsTestCase):
  """Tests for handlers_endpoints.MachineProviderEndpoints.lease."""
  api_service_cls = handlers_endpoints.MachineProviderEndpoints
  def setUp(self):
    super(MachineProviderLeaseTest, self).setUp()
    app = handlers_endpoints.create_endpoints_app()
    self.app = webtest.TestApp(app)
  def test_lease_duration(self):
    """A positive duration is accepted without error."""
    def is_group_member(group):
      return group == 'machine-provider-users'
    self.mock(acl.auth, 'is_group_member', is_group_member)
    lease_request = rpc_to_json(rpc_messages.LeaseRequest(
        dimensions=rpc_messages.Dimensions(
            os_family=rpc_messages.OSFamily.LINUX,
        ),
        duration=1,
        request_id='abc',
    ))
    lease_response = jsonish_dict_to_rpc(
        self.call_api('lease', lease_request).json,
        rpc_messages.LeaseResponse,
    )
    self.assertFalse(lease_response.error)
  def test_lease_duration_zero(self):
    """A zero duration is rejected with LEASE_LENGTH_UNSPECIFIED."""
    def is_group_member(group):
      return group == 'machine-provider-users'
    self.mock(acl.auth, 'is_group_member', is_group_member)
    lease_request = rpc_to_json(rpc_messages.LeaseRequest(
        dimensions=rpc_messages.Dimensions(
            os_family=rpc_messages.OSFamily.LINUX,
        ),
        duration=0,
        request_id='abc',
    ))
    lease_response = jsonish_dict_to_rpc(
        self.call_api('lease', lease_request).json,
        rpc_messages.LeaseResponse,
    )
    self.assertEqual(
        lease_response.error,
        rpc_messages.LeaseRequestError.LEASE_LENGTH_UNSPECIFIED,
    )
  def test_lease_duration_negative(self):
    """A negative duration is rejected with NONPOSITIVE_DEADLINE."""
    def is_group_member(group):
      return group == 'machine-provider-users'
    self.mock(acl.auth, 'is_group_member', is_group_member)
    lease_request = rpc_to_json(rpc_messages.LeaseRequest(
        dimensions=rpc_messages.Dimensions(
            os_family=rpc_messages.OSFamily.LINUX,
        ),
        duration=-1,
        request_id='abc',
    ))
    lease_response = jsonish_dict_to_rpc(
        self.call_api('lease', lease_request).json,
        rpc_messages.LeaseResponse,
    )
    self.assertEqual(
        lease_response.error,
        rpc_messages.LeaseRequestError.NONPOSITIVE_DEADLINE,
    )
  def test_lease_duration_too_long(self):
    """An excessive duration is rejected with LEASE_TOO_LONG."""
    def is_group_member(group):
      return group == 'machine-provider-users'
    self.mock(acl.auth, 'is_group_member', is_group_member)
    lease_request = rpc_to_json(rpc_messages.LeaseRequest(
        dimensions=rpc_messages.Dimensions(
            os_family=rpc_messages.OSFamily.LINUX,
        ),
        duration=9999999999,
        request_id='abc',
    ))
    lease_response = jsonish_dict_to_rpc(
        self.call_api('lease', lease_request).json,
        rpc_messages.LeaseResponse,
    )
    self.assertEqual(
        lease_response.error,
        rpc_messages.LeaseRequestError.LEASE_TOO_LONG,
    )
  def test_lease_duration_and_lease_expiration_ts(self):
    """Supplying both duration and lease_expiration_ts is rejected."""
    def is_group_member(group):
      return group == 'machine-provider-users'
    self.mock(acl.auth, 'is_group_member', is_group_member)
    lease_request = rpc_to_json(rpc_messages.LeaseRequest(
        dimensions=rpc_messages.Dimensions(
            os_family=rpc_messages.OSFamily.LINUX,
        ),
        duration=1,
        lease_expiration_ts=int(utils.time_time()) + 3600,
        request_id='abc',
    ))
    lease_response = jsonish_dict_to_rpc(
        self.call_api('lease', lease_request).json,
        rpc_messages.LeaseResponse,
    )
    self.assertEqual(
        lease_response.error,
        rpc_messages.LeaseRequestError.MUTUAL_EXCLUSION_ERROR,
    )
  def test_lease_timestamp(self):
    """A future lease_expiration_ts (one hour out) is accepted."""
    def is_group_member(group):
      return group == 'machine-provider-users'
    self.mock(acl.auth, 'is_group_member', is_group_member)
    lease_request = rpc_to_json(rpc_messages.LeaseRequest(
        dimensions=rpc_messages.Dimensions(
            os_family=rpc_messages.OSFamily.LINUX,
        ),
        lease_expiration_ts=int(utils.time_time()) + 3600,
        request_id='abc',
    ))
    lease_response = jsonish_dict_to_rpc(
        self.call_api('lease', lease_request).json,
        rpc_messages.LeaseResponse,
    )
    self.assertFalse(lease_response.error)
  def test_lease_timestamp_passed(self):
    """A lease_expiration_ts in the past is rejected."""
    def is_group_member(group):
      return group == 'machine-provider-users'
    self.mock(acl.auth, 'is_group_member', is_group_member)
    lease_request = rpc_to_json(rpc_messages.LeaseRequest(
        dimensions=rpc_messages.Dimensions(
            os_family=rpc_messages.OSFamily.LINUX,
        ),
        lease_expiration_ts=1,
        request_id='abc',
    ))
    lease_response = jsonish_dict_to_rpc(
        self.call_api('lease', lease_request).json,
        rpc_messages.LeaseResponse,
    )
    self.assertEqual(
        lease_response.error,
        rpc_messages.LeaseRequestError.LEASE_EXPIRATION_TS_ERROR,
    )
  def test_lease_timestamp_too_far(self):
    """A lease_expiration_ts too far in the future is rejected."""
    def is_group_member(group):
      return group == 'machine-provider-users'
    self.mock(acl.auth, 'is_group_member', is_group_member)
    lease_request = rpc_to_json(rpc_messages.LeaseRequest(
        dimensions=rpc_messages.Dimensions(
            os_family=rpc_messages.OSFamily.LINUX,
        ),
        lease_expiration_ts=9999999999,
        request_id='abc',
    ))
    lease_response = jsonish_dict_to_rpc(
        self.call_api('lease', lease_request).json,
        rpc_messages.LeaseResponse,
    )
    self.assertEqual(
        lease_response.error,
        rpc_messages.LeaseRequestError.LEASE_TOO_LONG,
    )
  def test_duplicate(self):
    """Resubmitting an identical request deduplicates to the same hash."""
    def is_group_member(group):
      return group == 'machine-provider-users'
    self.mock(acl.auth, 'is_group_member', is_group_member)
    lease_request = rpc_to_json(rpc_messages.LeaseRequest(
        dimensions=rpc_messages.Dimensions(
            os_family=rpc_messages.OSFamily.OSX,
        ),
        duration=3,
        request_id='asdf',
    ))
    lease_response_1 = jsonish_dict_to_rpc(
        self.call_api('lease', lease_request).json,
        rpc_messages.LeaseResponse,
    )
    lease_response_2 = jsonish_dict_to_rpc(
        self.call_api('lease', lease_request).json,
        rpc_messages.LeaseResponse,
    )
    self.assertFalse(lease_response_1.error)
    self.assertFalse(lease_response_2.error)
    # Both submissions resolve to the same underlying lease request.
    self.assertEqual(
        lease_response_1.request_hash,
        lease_response_2.request_hash,
    )
  def test_request_id_reuse(self):
    """Reusing a request_id with different parameters is rejected."""
    def is_group_member(group):
      return group == 'machine-provider-users'
    self.mock(acl.auth, 'is_group_member', is_group_member)
    lease_request_1 = rpc_to_json(rpc_messages.LeaseRequest(
        dimensions=rpc_messages.Dimensions(
            os_family=rpc_messages.OSFamily.WINDOWS,
        ),
        duration=7,
        request_id='qwerty',
    ))
    # Same request_id, different duration: not a duplicate of the first.
    lease_request_2 = rpc_to_json(rpc_messages.LeaseRequest(
        dimensions=rpc_messages.Dimensions(
            os_family=rpc_messages.OSFamily.WINDOWS,
        ),
        duration=189,
        request_id='qwerty',
    ))
    lease_response_1 = jsonish_dict_to_rpc(
        self.call_api('lease', lease_request_1).json,
        rpc_messages.LeaseResponse,
    )
    lease_response_2 = jsonish_dict_to_rpc(
        self.call_api('lease', lease_request_2).json,
        rpc_messages.LeaseResponse,
    )
    self.assertFalse(lease_response_1.error)
    self.assertEqual(
        lease_response_2.error,
        rpc_messages.LeaseRequestError.REQUEST_ID_REUSE,
    )
    self.assertNotEqual(
        lease_response_1.request_hash,
        lease_response_2.request_hash,
    )
class MachineProviderInstructTest(test_case.EndpointsTestCase):
  """Tests for handlers_endpoints.MachineProviderEndpoints.instruct.

  Fixes applied: removed two dead `response = ...` bindings inside
  assertRaises blocks (the call raises, so the assignment can never
  complete), and replaced the deprecated `failIf` alias with `assertFalse`.
  """
  api_service_cls = handlers_endpoints.MachineProviderEndpoints

  def setUp(self):
    super(MachineProviderInstructTest, self).setUp()
    app = handlers_endpoints.create_endpoints_app()
    self.app = webtest.TestApp(app)

  def test_lease_request_not_found(self):
    """Instructing fails when the machine's lease request doesn't exist."""
    def is_group_member(group):
      return group == 'machine-provider-users'
    auth_testing.mock_get_current_identity(self)
    self.mock(acl.auth, 'is_group_member', is_group_member)
    request = rpc_messages.MachineInstructionRequest(
        request_id='request-id',
        instruction=rpc_messages.Instruction(
            swarming_server='example.com',
        ),
    )
    # The machine points at a LeaseRequest key that was never stored.
    machine_key = models.CatalogMachineEntry(
        id='machine',
        dimensions=rpc_messages.Dimensions(
            backend=rpc_messages.Backend.DUMMY,
        ),
        lease_expiration_ts=datetime.datetime.fromtimestamp(9999999999),
        lease_id=ndb.Key(models.LeaseRequest, 'id').id(),
    ).put()
    request = rpc_to_json(request)
    with self.assertRaises(webtest.app.AppError):
      # The call raises, so no response can be decoded.
      self.call_api('instruct', request)
    self.assertFalse(machine_key.get().instruction)

  def test_lease_request_not_fulfilled(self):
    """Instructing fails while the lease request is still UNTRIAGED."""
    def is_group_member(group):
      return group == 'machine-provider-users'
    auth_testing.mock_get_current_identity(self)
    self.mock(acl.auth, 'is_group_member', is_group_member)
    request = rpc_messages.MachineInstructionRequest(
        request_id='request-id',
        instruction=rpc_messages.Instruction(
            swarming_server='example.com',
        ),
    )
    lease_key = models.LeaseRequest(
        key=models.LeaseRequest.generate_key(
            auth_testing.DEFAULT_MOCKED_IDENTITY.to_bytes(),
            request,
        ),
        deduplication_checksum='checksum',
        machine_id='machine',
        owner=auth_testing.DEFAULT_MOCKED_IDENTITY,
        request=rpc_messages.LeaseRequest(
            dimensions=rpc_messages.Dimensions(),
            request_id='request-id',
        ),
        response=rpc_messages.LeaseResponse(
            client_request_id='request-id',
            state=rpc_messages.LeaseRequestState.UNTRIAGED,
        ),
    ).put()
    machine_key = models.CatalogMachineEntry(
        id='machine',
        dimensions=rpc_messages.Dimensions(
            backend=rpc_messages.Backend.DUMMY,
        ),
        lease_expiration_ts=datetime.datetime.fromtimestamp(9999999999),
        lease_id=lease_key.id(),
    ).put()
    request = rpc_to_json(request)
    response = jsonish_dict_to_rpc(
        self.call_api('instruct', request).json,
        rpc_messages.MachineInstructionResponse,
    )
    self.assertEqual(
        response.error, rpc_messages.MachineInstructionError.NOT_FULFILLED)
    self.assertFalse(machine_key.get().instruction)

  def test_lease_request_already_reclaimed(self):
    """Instructing fails when the fulfilled lease names no hostname."""
    def is_group_member(group):
      return group == 'machine-provider-users'
    auth_testing.mock_get_current_identity(self)
    self.mock(acl.auth, 'is_group_member', is_group_member)
    request = rpc_messages.MachineInstructionRequest(
        request_id='request-id',
        instruction=rpc_messages.Instruction(
            swarming_server='example.com',
        ),
    )
    # FULFILLED but without a hostname in the response (unlike the success
    # cases below), which the endpoint reports as ALREADY_RECLAIMED.
    lease_key = models.LeaseRequest(
        key=models.LeaseRequest.generate_key(
            auth_testing.DEFAULT_MOCKED_IDENTITY.to_bytes(),
            request,
        ),
        deduplication_checksum='checksum',
        machine_id='machine',
        owner=auth_testing.DEFAULT_MOCKED_IDENTITY,
        request=rpc_messages.LeaseRequest(
            dimensions=rpc_messages.Dimensions(),
            request_id='request-id',
        ),
        response=rpc_messages.LeaseResponse(
            client_request_id='request-id',
            state=rpc_messages.LeaseRequestState.FULFILLED,
        ),
    ).put()
    machine_key = models.CatalogMachineEntry(
        id='machine',
        dimensions=rpc_messages.Dimensions(
            backend=rpc_messages.Backend.DUMMY,
        ),
        lease_expiration_ts=datetime.datetime.fromtimestamp(9999999999),
        lease_id=lease_key.id(),
    ).put()
    request = rpc_to_json(request)
    response = jsonish_dict_to_rpc(
        self.call_api('instruct', request).json,
        rpc_messages.MachineInstructionResponse,
    )
    self.assertEqual(
        response.error, rpc_messages.MachineInstructionError.ALREADY_RECLAIMED)
    self.assertFalse(machine_key.get().instruction)

  def test_machine_not_found(self):
    """Instructing fails when no CatalogMachineEntry exists."""
    def is_group_member(group):
      return group == 'machine-provider-users'
    auth_testing.mock_get_current_identity(self)
    self.mock(acl.auth, 'is_group_member', is_group_member)
    request = rpc_messages.MachineInstructionRequest(
        request_id='request-id',
        instruction=rpc_messages.Instruction(
            swarming_server='example.com',
        ),
    )
    models.LeaseRequest(
        key=models.LeaseRequest.generate_key(
            auth_testing.DEFAULT_MOCKED_IDENTITY.to_bytes(),
            request,
        ),
        deduplication_checksum='checksum',
        machine_id='machine',
        owner=auth_testing.DEFAULT_MOCKED_IDENTITY,
        request=rpc_messages.LeaseRequest(
            dimensions=rpc_messages.Dimensions(),
            request_id='request-id',
        ),
        response=rpc_messages.LeaseResponse(
            client_request_id='request-id',
            hostname='fake-host',
            state=rpc_messages.LeaseRequestState.FULFILLED,
        ),
    ).put()
    request = rpc_to_json(request)
    with self.assertRaises(webtest.app.AppError):
      # The call raises, so no response can be decoded.
      self.call_api('instruct', request)

  def test_machine_not_fulfilled(self):
    """Instructing fails when the machine entry carries no lease_id."""
    def is_group_member(group):
      return group == 'machine-provider-users'
    auth_testing.mock_get_current_identity(self)
    self.mock(acl.auth, 'is_group_member', is_group_member)
    request = rpc_messages.MachineInstructionRequest(
        request_id='request-id',
        instruction=rpc_messages.Instruction(
            swarming_server='example.com',
        ),
    )
    models.LeaseRequest(
        key=models.LeaseRequest.generate_key(
            auth_testing.DEFAULT_MOCKED_IDENTITY.to_bytes(),
            request,
        ),
        deduplication_checksum='checksum',
        machine_id='machine',
        owner=auth_testing.DEFAULT_MOCKED_IDENTITY,
        request=rpc_messages.LeaseRequest(
            dimensions=rpc_messages.Dimensions(),
            request_id='request-id',
        ),
        response=rpc_messages.LeaseResponse(
            client_request_id='request-id',
            hostname='fake-host',
            state=rpc_messages.LeaseRequestState.FULFILLED,
        ),
    ).put()
    # Note: no lease_id on the entry.
    machine_key = models.CatalogMachineEntry(
        id='machine',
        dimensions=rpc_messages.Dimensions(
            backend=rpc_messages.Backend.DUMMY,
        ),
        lease_expiration_ts=datetime.datetime.fromtimestamp(9999999999),
    ).put()
    request = rpc_to_json(request)
    response = jsonish_dict_to_rpc(
        self.call_api('instruct', request).json,
        rpc_messages.MachineInstructionResponse,
    )
    self.assertEqual(
        response.error, rpc_messages.MachineInstructionError.NOT_FULFILLED)
    self.assertFalse(machine_key.get().instruction)

  def test_machine_already_reclaimed(self):
    """Instructing fails when the machine's lease has already expired."""
    def is_group_member(group):
      return group == 'machine-provider-users'
    auth_testing.mock_get_current_identity(self)
    self.mock(acl.auth, 'is_group_member', is_group_member)
    request = rpc_messages.MachineInstructionRequest(
        request_id='request-id',
        instruction=rpc_messages.Instruction(
            swarming_server='example.com',
        ),
    )
    lease_key = models.LeaseRequest(
        key=models.LeaseRequest.generate_key(
            auth_testing.DEFAULT_MOCKED_IDENTITY.to_bytes(),
            request,
        ),
        deduplication_checksum='checksum',
        machine_id='machine',
        owner=auth_testing.DEFAULT_MOCKED_IDENTITY,
        request=rpc_messages.LeaseRequest(
            dimensions=rpc_messages.Dimensions(),
            request_id='request-id',
        ),
        response=rpc_messages.LeaseResponse(
            client_request_id='request-id',
            hostname='fake-host',
            state=rpc_messages.LeaseRequestState.FULFILLED,
        ),
    ).put()
    # lease_expiration_ts is far in the past.
    machine_key = models.CatalogMachineEntry(
        id='machine',
        dimensions=rpc_messages.Dimensions(
            backend=rpc_messages.Backend.DUMMY,
        ),
        lease_expiration_ts=datetime.datetime.fromtimestamp(1),
        lease_id=lease_key.id(),
    ).put()
    request = rpc_to_json(request)
    response = jsonish_dict_to_rpc(
        self.call_api('instruct', request).json,
        rpc_messages.MachineInstructionResponse,
    )
    self.assertEqual(
        response.error, rpc_messages.MachineInstructionError.ALREADY_RECLAIMED)
    self.assertFalse(machine_key.get().instruction)

  def test_invalid_instruction(self):
    """An empty Instruction message is rejected as INVALID_INSTRUCTION."""
    def is_group_member(group):
      return group == 'machine-provider-users'
    auth_testing.mock_get_current_identity(self)
    self.mock(acl.auth, 'is_group_member', is_group_member)
    request = rpc_messages.MachineInstructionRequest(
        request_id='request-id',
        instruction=rpc_messages.Instruction(
        ),
    )
    lease_key = models.LeaseRequest(
        key=models.LeaseRequest.generate_key(
            auth_testing.DEFAULT_MOCKED_IDENTITY.to_bytes(),
            request,
        ),
        deduplication_checksum='checksum',
        machine_id='machine',
        owner=auth_testing.DEFAULT_MOCKED_IDENTITY,
        request=rpc_messages.LeaseRequest(
            dimensions=rpc_messages.Dimensions(),
            request_id='request-id',
        ),
        response=rpc_messages.LeaseResponse(
            client_request_id='request-id',
            hostname='fake-host',
            state=rpc_messages.LeaseRequestState.FULFILLED,
        ),
    ).put()
    machine_key = models.CatalogMachineEntry(
        id='machine',
        dimensions=rpc_messages.Dimensions(
            backend=rpc_messages.Backend.DUMMY,
        ),
        lease_expiration_ts=datetime.datetime.fromtimestamp(9999999999),
        lease_id=lease_key.id(),
    ).put()
    request = rpc_to_json(request)
    response = jsonish_dict_to_rpc(
        self.call_api('instruct', request).json,
        rpc_messages.MachineInstructionResponse,
    )
    self.assertEqual(
        response.error,
        rpc_messages.MachineInstructionError.INVALID_INSTRUCTION,
    )
    self.assertFalse(machine_key.get().instruction)

  def test_instructed(self):
    """A valid instruct call stores the instruction in PENDING state."""
    def is_group_member(group):
      return group == 'machine-provider-users'
    auth_testing.mock_get_current_identity(self)
    self.mock(acl.auth, 'is_group_member', is_group_member)
    request = rpc_messages.MachineInstructionRequest(
        request_id='request-id',
        instruction=rpc_messages.Instruction(
            swarming_server='example.com',
        ),
    )
    lease_key = models.LeaseRequest(
        key=models.LeaseRequest.generate_key(
            auth_testing.DEFAULT_MOCKED_IDENTITY.to_bytes(),
            request,
        ),
        deduplication_checksum='checksum',
        machine_id='machine',
        owner=auth_testing.DEFAULT_MOCKED_IDENTITY,
        request=rpc_messages.LeaseRequest(
            dimensions=rpc_messages.Dimensions(),
            request_id='request-id',
        ),
        response=rpc_messages.LeaseResponse(
            client_request_id='request-id',
            hostname='fake-host',
            state=rpc_messages.LeaseRequestState.FULFILLED,
        ),
    ).put()
    machine_key = models.CatalogMachineEntry(
        id='machine',
        dimensions=rpc_messages.Dimensions(
            backend=rpc_messages.Backend.DUMMY,
        ),
        lease_expiration_ts=datetime.datetime.fromtimestamp(9999999999),
        lease_id=lease_key.id(),
    ).put()
    request = rpc_to_json(request)
    response = jsonish_dict_to_rpc(
        self.call_api('instruct', request).json,
        rpc_messages.MachineInstructionResponse,
    )
    self.assertFalse(response.error)
    self.assertEqual(
        machine_key.get().instruction.instruction.swarming_server,
        'example.com',
    )
    self.assertEqual(
        machine_key.get().instruction.state, models.InstructionStates.PENDING)
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
| 33.68928
| 80
| 0.678713
| 6,810
| 65,054
| 6.184435
| 0.043465
| 0.1016
| 0.03989
| 0.056677
| 0.925943
| 0.915495
| 0.909868
| 0.90018
| 0.889614
| 0.881114
| 0
| 0.004274
| 0.223076
| 65,054
| 1,930
| 81
| 33.706736
| 0.829013
| 0.016586
| 0
| 0.830389
| 0
| 0
| 0.056452
| 0.009077
| 0
| 0
| 0
| 0
| 0.053004
| 1
| 0.061249
| false
| 0.000589
| 0.009423
| 0.015312
| 0.097762
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9a8d5047aa6b45cb3f631b17cc6dd5cc67bd9f10
| 29,808
|
py
|
Python
|
experiment2.py
|
MartinHeroux/J_Physiol_grasp_illusion_2017
|
6d58ce6129e88ad13911d2373491cdc76317d821
|
[
"MIT"
] | null | null | null |
experiment2.py
|
MartinHeroux/J_Physiol_grasp_illusion_2017
|
6d58ce6129e88ad13911d2373491cdc76317d821
|
[
"MIT"
] | null | null | null |
experiment2.py
|
MartinHeroux/J_Physiol_grasp_illusion_2017
|
6d58ce6129e88ad13911d2373491cdc76317d821
|
[
"MIT"
] | null | null | null |
import pandas as pd
from math import sqrt
import matplotlib.pyplot as plt
import cumming_plot
######################################
# IMPORT DATA, TIDY, DESCRIPTIVE STATS
######################################
# Load the raw experiment-2 data; first column is the participant index.
exp2 = pd.read_csv('exp2_data.txt', index_col=0)
# Gender and handedness are nominal: store them as pandas categories.
for demographic in ('gender', 'handedness'):
    exp2.loc[:, demographic] = exp2.loc[:, demographic].astype('category')
# Difference scores, condition - control, for each stimulus manipulation
# (temperature, texture, shape, firmness) and both outcomes
# (sp = perceived spacing, own = perceived ownership).
# Each entry is (new column, minuend column, subtrahend column).
# Order matters: the *_time_diff_* entries reference *_diff_* columns created
# by earlier entries, and insertion order fixes the column order used by the
# summary-statistics loops further down the script.
DIFF_COLUMNS = [
    # Temperature - spacing
    ('cold_start_diff_sp', 'cold_start_sp', 'temp_start_sp'),
    ('hot_start_diff_sp', 'hot_start_sp', 'temp_start_sp'),
    ('cold_end_diff_sp', 'cold_end_sp', 'temp_end_sp'),
    ('hot_end_diff_sp', 'hot_end_sp', 'temp_end_sp'),
    ('cold_time_diff_sp', 'cold_end_diff_sp', 'cold_start_diff_sp'),
    ('hot_time_diff_sp', 'hot_end_diff_sp', 'hot_start_diff_sp'),
    # Temperature - ownership
    ('cold_start_diff_own', 'cold_start_own', 'temp_start_own'),
    ('hot_start_diff_own', 'hot_start_own', 'temp_start_own'),
    ('cold_end_diff_own', 'cold_end_own', 'temp_end_own'),
    ('hot_end_diff_own', 'hot_end_own', 'temp_end_own'),
    ('cold_time_diff_own', 'cold_end_diff_own', 'cold_start_diff_own'),
    ('hot_time_diff_own', 'hot_end_diff_own', 'hot_start_diff_own'),
    # Texture - spacing
    ('smooth_start_diff_sp', 'smooth_start_sp', 'text_start_sp'),
    ('rough_start_diff_sp', 'rough_start_sp', 'text_start_sp'),
    ('smooth_end_diff_sp', 'smooth_end_sp', 'text_end_sp'),
    ('rough_end_diff_sp', 'rough_end_sp', 'text_end_sp'),
    ('rough_time_diff_sp', 'rough_end_diff_sp', 'rough_start_diff_sp'),
    ('smooth_time_diff_sp', 'smooth_end_diff_sp', 'smooth_start_diff_sp'),
    # Texture - ownership
    ('smooth_start_diff_own', 'smooth_start_own', 'text_start_own'),
    ('rough_start_diff_own', 'rough_start_own', 'text_start_own'),
    ('smooth_end_diff_own', 'smooth_end_own', 'text_end_own'),
    ('rough_end_diff_own', 'rough_end_own', 'text_end_own'),
    ('rough_time_diff_own', 'rough_end_diff_own', 'rough_start_diff_own'),
    ('smooth_time_diff_own', 'smooth_end_diff_own', 'smooth_start_diff_own'),
    # Shape - spacing
    ('square_start_diff_sp', 'square_start_sp', 'shape_start_sp'),
    ('odd_start_diff_sp', 'odd_start_sp', 'shape_start_sp'),
    ('square_end_diff_sp', 'square_end_sp', 'shape_end_sp'),
    ('odd_end_diff_sp', 'odd_end_sp', 'shape_end_sp'),
    ('odd_time_diff_sp', 'odd_end_diff_sp', 'odd_start_diff_sp'),
    ('square_time_diff_sp', 'square_end_diff_sp', 'square_start_diff_sp'),
    # Shape - ownership
    ('square_start_diff_own', 'square_start_own', 'shape_start_own'),
    ('odd_start_diff_own', 'odd_start_own', 'shape_start_own'),
    ('square_end_diff_own', 'square_end_own', 'shape_end_own'),
    ('odd_end_diff_own', 'odd_end_own', 'shape_end_own'),
    ('odd_time_diff_own', 'odd_end_diff_own', 'odd_start_diff_own'),
    ('square_time_diff_own', 'square_end_diff_own', 'square_start_diff_own'),
    # Firmness - spacing
    ('firm_start_diff_sp', 'firm_start_sp', 'firmness_start_sp'),
    ('soft_start_diff_sp', 'soft_start_sp', 'firmness_start_sp'),
    ('firm_end_diff_sp', 'firm_end_sp', 'firmness_end_sp'),
    ('soft_end_diff_sp', 'soft_end_sp', 'firmness_end_sp'),
    ('firm_time_diff_sp', 'firm_end_diff_sp', 'firm_start_diff_sp'),
    ('soft_time_diff_sp', 'soft_end_diff_sp', 'soft_start_diff_sp'),
    # Firmness - ownership
    ('firm_start_diff_own', 'firm_start_own', 'firmness_start_own'),
    ('soft_start_diff_own', 'soft_start_own', 'firmness_start_own'),
    ('firm_end_diff_own', 'firm_end_own', 'firmness_end_own'),
    ('soft_end_diff_own', 'soft_end_own', 'firmness_end_own'),
    ('firm_time_diff_own', 'firm_end_diff_own', 'firm_start_diff_own'),
    ('soft_time_diff_own', 'soft_end_diff_own', 'soft_start_diff_own'),
]
for new_col, minuend, subtrahend in DIFF_COLUMNS:
    exp2[new_col] = exp2[minuend] - exp2[subtrahend]
# Loop through items in dataframe and calculate basic summary statistics
# Participant demographics, pre-formatted as aligned text lines.
txt = ['{:<15} {}'.format('Male', sum(exp2.gender == 'male')),
       '{:<15} {}'.format('Female', sum(exp2.gender == 'female')),
       '{:<15} {}'.format('Right handed', sum(exp2.handedness == 'right')),
       '{:<15} {}'.format('Left handed', sum(exp2.handedness == 'left')),
       '{:<15} mean = {:>4.1f} SD = {:>3.1f} min = {:>2.0f} '
       'max = {:>2.0f}'.format('age', exp2['age'].mean(),
                               exp2['age'].std(), exp2['age'].min(), exp2['age'].max())]
# Echo demographics to the console.
for line in txt:
    print(line)
# Write demographics to the results file. The first line opens with 'w'
# (truncating any previous run's file); all later lines append with 'a'.
for loop, line in enumerate(txt):
    if loop == 0:
        flag = 'w'
    else:
        flag = 'a'
    with open('exp2_results.txt', flag) as file:
        file.write(line)
        file.write('\n')
# NOTE: `flag` is deliberately reused below — after the loop it is 'a',
# so the remaining sections append to the same file.
with open('exp2_results.txt', flag) as file:
    file.write('\n')
    file.write('='*7)
    file.write('\nSPACING\n')
    file.write('='*7)
    file.write('\n'*2)
# Spacing measures are stored as float64 columns: for each, write
# count / mean / SD / 95% margin of error / 95% CI / min / max.
# NOTE(review): the MoE denominator uses len(exp2[i]) (all rows) while count
# uses .count() (non-NaN rows) — these differ if the data contain NaNs; the
# data file presumably has none, but confirm.
for i in exp2:
    if exp2[i].dtypes == 'float64':
        txt = '{:>22} count = {:<2.0f} mean = {:>4.1f} SD = {:>3.1f} 95% MoE = {:>4.2f} 95%CI = {:>5.2f} to {:>5.2f} min = {:>2.0f} ' \
              'max = {:>2.0f}'.format(i, exp2[i].count(), exp2[i].mean(), exp2[i].std(), (exp2[i].std() / sqrt(len(exp2[i]))) * 1.96,
                                      exp2[i].mean() - (exp2[i].std() / sqrt(len(exp2[i]))) * 1.96,
                                      exp2[i].mean() + (exp2[i].std() / sqrt(len(exp2[i]))) * 1.96,
                                      exp2[i].min(), exp2[i].max())
        print(txt)
        with open('exp2_results.txt', 'a') as file:
            file.write(txt)
            file.write('\n')
# Section header for the ownership measures.
with open('exp2_results.txt', flag) as file:
    file.write('\n')
    file.write('='*9)
    file.write('\nOWNERSHIP\n')
    file.write('='*9)
    file.write('\n'*2)
# Ownership ratings are int64 columns; 'age' is excluded (demographic, not a
# rating, already summarised above).
for i in exp2:
    if exp2[i].dtypes == 'int64':
        if not i == 'age':
            txt = '{:>22} count = {:<2.0f} mean = {:>4.1f} SD = {:>3.1f} 95% MoE = {:>3.2f} 95%CI = {:>5.2f} to {:>5.2f}' \
                  ' min = {:>2.0f} max = {:>2.0f}'.format(i, exp2[i].count(), exp2[i].mean(), exp2[i].std(),
                                                          (exp2[i].std() / sqrt(len(exp2[i]))) * 1.96,
                                                          exp2[i].mean() - (exp2[i].std() / sqrt(len(exp2[i]))) * 1.96,
                                                          exp2[i].mean() + (exp2[i].std() / sqrt(len(exp2[i]))) * 1.96,
                                                          exp2[i].min(), exp2[i].max())
            print(txt)
            with open('exp2_results.txt', 'a') as file:
                file.write(txt)
                file.write('\n')
# Append a legend explaining the abbreviations used in the column names.
# NOTE(review): the legend says diff = "end - start", but the *_diff_* columns
# above are condition - control (only *_time_diff_* is end - start) — confirm
# the intended wording.
with open('exp2_results.txt', 'a') as file:
    file.write('\n\n{:8} = {}'.format('sp', 'perceived spacing'))
    file.write('\n{:8} = {}'.format('own', 'perceived ownership'))
    file.write('\n{:8} = {}'.format('95% MoE','margin of error (one side of error bar) for 95% confidence interval'))
    file.write('\n{:8} = {}'.format('diff', 'difference, calculated as end - start'))
    file.write('\n{:8} = {}'.format('temp', 'control trial for temperature'))
    file.write('\n{:8} = {}'.format('text', 'control trial for texture'))
    file.write('\n{:8} = {}'.format('shape', 'control trial for shape'))
    file.write('\n{:8} = {}'.format('firmness', 'control trial for firmness'))
######################
# FIGURE 5 Temperature
######################
# 2x2 panel figure: top row = perceived spacing, bottom row = ownership;
# left column = hot condition, right column = cold condition.
fig = plt.figure(figsize=[5, 4])
# Four distinct but identical style dicts: open triangles for raw start/end,
# filled triangle for the paired difference.
style1, style2, style3, style4 = (
    {'a': ['^', 'w', 'k'], 'b': ['^', 'w', 'k'], 'diff': ['^', 'k', 'k']}
    for _ in range(4))
marker_size = [2, 4]
marker_edge_width = 0.4
linewidth = 1
axes_tick_width = .5
font_size = 8
letterfontsize = 10
connectcolor = '0.8'
x_spacing = [0.01, 0.015, 0.05, 0.055, 0.075]
jit = 0.0001
skip_raw_marker = True
x_axis_nudge = [-0.005, -0.005, -.005]
zero_line2 = False
ax2yticks = [-1, 2, 5]  # NOTE(review): assigned but never passed on in this section
# Keyword arguments common to all four cumming_plot panels.
shared_kwargs = dict(
    xlabel=['start', 'end', 'effect'],
    zero_line=False,
    y2ticks=True,
    font_size=font_size,
    marker_size=marker_size,
    markeredgewidth=marker_edge_width,
    axes_tick_width=axes_tick_width,
    linewidth=linewidth,
    connectcolor=connectcolor,
    x_spacing=x_spacing,
    jit=jit,
    skip_raw_marker=skip_raw_marker,
    x_axis_nudge=x_axis_nudge,
    zero_line2=zero_line2,
)
# (subplot position, start column, end column, yticks, style, ylabel, title, panel letter)
panels = [
    (1, exp2.hot_start_diff_sp, exp2.hot_end_diff_sp, [-12, 12, 4], style1,
     'Difference \nperceived spacing (cm)', 'Hot', 'A'),
    (2, exp2.cold_start_diff_sp, exp2.cold_end_diff_sp, [-12, 12, 4], style2,
     None, 'Cold', None),
    (3, exp2.hot_start_diff_own, exp2.hot_end_diff_own, [-8, 6, 2], style3,
     'Difference ownership', 'Hot', 'B'),
    (4, exp2.cold_start_diff_own, exp2.cold_end_diff_own, [-8, 6, 2], style4,
     None, 'Cold', None),
]
axes_made = []
for pos, start_col, end_col, yticks, panel_style, ylabel, title, letter in panels:
    data = [list(start_col), list(end_col)]
    ax = fig.add_subplot(2, 2, pos)
    extra = {'yticks': yticks, 'style': panel_style}
    if ylabel is not None:
        extra['ylabel'] = ylabel
    cumming_plot.paired(data, ax, **dict(shared_kwargs, **extra))
    if letter is not None:
        # Panel letter outside the axes (top-left).
        plt.text(-.2, 1.15, letter,
                 horizontalalignment='center',
                 fontsize=letterfontsize,
                 transform=ax.transAxes)
    # Condition title centred above the axes.
    plt.text(0.5, 1.08, title,
             horizontalalignment='center',
             fontsize=font_size,
             transform=ax.transAxes)
    axes_made.append(ax)
ax1, ax2, ax3, ax4 = axes_made
# Adjust spacing of subplots
left = 0.15
right = 0.9
bottom = 0.05
top = 0.92
wspace = 0.5  # the amount of width reserved for blank space between subplots
hspace = 0.5  # the amount of height reserved for white space between subplots
fig.subplots_adjust(left=left, bottom=bottom, right=right, top=top, wspace=wspace, hspace=hspace)
#plt.savefig('figure5.pdf', format='pdf', dpi=600)
plt.savefig('figure5.png', format='png', dpi=600)
plt.savefig('figure5.svg', format='svg')
#####################
# FIGURE 6 Compliance
#####################
# 2x2 panel figure: top row = perceived spacing, bottom row = ownership;
# left column = soft condition, right column = firm condition.
fig = plt.figure(figsize=[5, 4])
style1, style2, style3, style4 = (
    {'a': ['^', 'w', 'k'], 'b': ['^', 'w', 'k'], 'diff': ['^', 'k', 'k']}
    for _ in range(4))
marker_size = [2, 4]
# NOTE(review): 1 here vs 0.4 in the other figure sections — confirm intended.
marker_edge_width = 1
linewidth = 1
axes_tick_width = .5
font_size = 8
letterfontsize = 10
connectcolor = '0.8'
x_spacing = [0.01, 0.015, 0.05, 0.055, 0.075]
jit = 0.0001
skip_raw_marker = True
x_axis_nudge = [-0.005, -0.005, -.005]
zero_line2 = False
ax2yticks = [-1, 2, 5]  # NOTE(review): assigned but never passed on in this section
# Keyword arguments common to all four cumming_plot panels.
shared_kwargs = dict(
    xlabel=['start', 'end', 'effect'],
    zero_line=False,
    y2ticks=True,
    font_size=font_size,
    marker_size=marker_size,
    markeredgewidth=marker_edge_width,
    axes_tick_width=axes_tick_width,
    linewidth=linewidth,
    connectcolor=connectcolor,
    x_spacing=x_spacing,
    jit=jit,
    skip_raw_marker=skip_raw_marker,
    x_axis_nudge=x_axis_nudge,
    zero_line2=zero_line2,
)
# (subplot position, start column, end column, yticks, style, ylabel, title, panel letter)
panels = [
    (1, exp2.soft_start_diff_sp, exp2.soft_end_diff_sp, [-10, 6, 2], style1,
     'Difference \nperceived spacing (cm)', 'Soft', 'A'),
    (2, exp2.firm_start_diff_sp, exp2.firm_end_diff_sp, [-10, 6, 2], style2,
     None, 'Firm', None),
    (3, exp2.soft_start_diff_own, exp2.soft_end_diff_own, [-8, 6, 2], style3,
     'Difference ownership', 'Soft', 'B'),
    (4, exp2.firm_start_diff_own, exp2.firm_end_diff_own, [-8, 6, 2], style4,
     None, 'Firm', None),
]
axes_made = []
for pos, start_col, end_col, yticks, panel_style, ylabel, title, letter in panels:
    data = [list(start_col), list(end_col)]
    ax = fig.add_subplot(2, 2, pos)
    extra = {'yticks': yticks, 'style': panel_style}
    if ylabel is not None:
        extra['ylabel'] = ylabel
    cumming_plot.paired(data, ax, **dict(shared_kwargs, **extra))
    if letter is not None:
        # Panel letter outside the axes (top-left).
        plt.text(-.2, 1.15, letter,
                 horizontalalignment='center',
                 fontsize=letterfontsize,
                 transform=ax.transAxes)
    # Condition title centred above the axes.
    plt.text(0.5, 1.08, title,
             horizontalalignment='center',
             fontsize=font_size,
             transform=ax.transAxes)
    axes_made.append(ax)
ax1, ax2, ax3, ax4 = axes_made
# Adjust spacing of subplots
left = 0.15
right = 0.9
bottom = 0.05
top = 0.92
wspace = 0.5  # the amount of width reserved for blank space between subplots
hspace = 0.5  # the amount of height reserved for white space between subplots
fig.subplots_adjust(left=left, bottom=bottom, right=right, top=top, wspace=wspace, hspace=hspace)
#plt.savefig('figure6.pdf', format='pdf', dpi=600)
plt.savefig('figure6.png', format='png', dpi=600)
plt.savefig('figure6.svg', format='svg')
##################
# FIGURE 7 Texture
##################
# 2x2 panel figure: top row = perceived spacing, bottom row = ownership;
# left column = rough condition, right column = smooth condition.
fig = plt.figure(figsize=[5, 4])
style1, style2, style3, style4 = (
    {'a': ['^', 'w', 'k'], 'b': ['^', 'w', 'k'], 'diff': ['^', 'k', 'k']}
    for _ in range(4))
marker_size = [2, 4]
marker_edge_width = 0.4
linewidth = 1
axes_tick_width = .5
font_size = 8
letterfontsize = 10
connectcolor = '0.8'
x_spacing = [0.01, 0.015, 0.05, 0.055, 0.075]
jit = 0.0001
skip_raw_marker = True
x_axis_nudge = [-0.005, -0.005, -.005]
zero_line2 = False
ax2yticks = [-1, 2, 5]  # NOTE(review): assigned but never passed on in this section
# Keyword arguments common to all four cumming_plot panels.
shared_kwargs = dict(
    xlabel=['start', 'end', 'effect'],
    zero_line=False,
    y2ticks=True,
    font_size=font_size,
    marker_size=marker_size,
    markeredgewidth=marker_edge_width,
    axes_tick_width=axes_tick_width,
    linewidth=linewidth,
    connectcolor=connectcolor,
    x_spacing=x_spacing,
    jit=jit,
    skip_raw_marker=skip_raw_marker,
    x_axis_nudge=x_axis_nudge,
    zero_line2=zero_line2,
)
# (subplot position, start column, end column, yticks, style, ylabel, title, panel letter)
panels = [
    (1, exp2.rough_start_diff_sp, exp2.rough_end_diff_sp, [-12, 16, 4], style1,
     'Difference \nperceived spacing (cm)', 'Rough', 'A'),
    (2, exp2.smooth_start_diff_sp, exp2.smooth_end_diff_sp, [-12, 16, 4], style2,
     None, 'Smooth', None),
    (3, exp2.rough_start_diff_own, exp2.rough_end_diff_own, [-6, 6, 2], style3,
     'Difference ownership', 'Rough', 'B'),
    (4, exp2.smooth_start_diff_own, exp2.smooth_end_diff_own, [-6, 6, 2], style4,
     None, 'Smooth', None),
]
axes_made = []
for pos, start_col, end_col, yticks, panel_style, ylabel, title, letter in panels:
    data = [list(start_col), list(end_col)]
    ax = fig.add_subplot(2, 2, pos)
    extra = {'yticks': yticks, 'style': panel_style}
    if ylabel is not None:
        extra['ylabel'] = ylabel
    cumming_plot.paired(data, ax, **dict(shared_kwargs, **extra))
    if letter is not None:
        # Panel letter outside the axes (top-left).
        plt.text(-.2, 1.15, letter,
                 horizontalalignment='center',
                 fontsize=letterfontsize,
                 transform=ax.transAxes)
    # Condition title centred above the axes.
    plt.text(0.5, 1.08, title,
             horizontalalignment='center',
             fontsize=font_size,
             transform=ax.transAxes)
    axes_made.append(ax)
ax1, ax2, ax3, ax4 = axes_made
# Adjust spacing of subplots
left = 0.15
right = 0.9
bottom = 0.05
top = 0.92
wspace = 0.5  # the amount of width reserved for blank space between subplots
hspace = 0.5  # the amount of height reserved for white space between subplots
fig.subplots_adjust(left=left, bottom=bottom, right=right, top=top, wspace=wspace, hspace=hspace)
#plt.savefig('figure7.pdf', format='pdf', dpi=600)
plt.savefig('figure7.png', format='png', dpi=600)
plt.savefig('figure7.svg', format='svg')
################
# FIGURE 8 Shape
################
# 2x2 panel figure: top row = perceived spacing, bottom row = ownership;
# left column = odd-shape condition, right column = rectangular condition.
fig = plt.figure(figsize=[5, 4])
style1, style2, style3, style4 = (
    {'a': ['^', 'w', 'k'], 'b': ['^', 'w', 'k'], 'diff': ['^', 'k', 'k']}
    for _ in range(4))
marker_size = [2, 4]
marker_edge_width = 0.4
linewidth = 1
axes_tick_width = .5
font_size = 8
letterfontsize = 10
connectcolor = '0.8'
x_spacing = [0.01, 0.015, 0.05, 0.055, 0.075]
jit = 0.0001
skip_raw_marker = True
x_axis_nudge = [-0.005, -0.005, -.005]
zero_line2 = False
ax2yticks = [-1, 2, 5]  # NOTE(review): assigned but never passed on in this section
# Keyword arguments common to all four cumming_plot panels.
shared_kwargs = dict(
    xlabel=['start', 'end', 'effect'],
    zero_line=False,
    y2ticks=True,
    font_size=font_size,
    marker_size=marker_size,
    markeredgewidth=marker_edge_width,
    axes_tick_width=axes_tick_width,
    linewidth=linewidth,
    connectcolor=connectcolor,
    x_spacing=x_spacing,
    jit=jit,
    skip_raw_marker=skip_raw_marker,
    x_axis_nudge=x_axis_nudge,
    zero_line2=zero_line2,
)
# (subplot position, start column, end column, yticks, style, ylabel, title, panel letter)
# BUG FIX: the original 'Odd' ownership panel (subplot 3) paired
# odd_start_diff_own with square_end_diff_own — a copy-paste error: every
# other figure pairs start/end of the SAME condition, and subplot 4 already
# plots square start vs square end. Fixed to use odd_end_diff_own.
panels = [
    (1, exp2.odd_start_diff_sp, exp2.odd_end_diff_sp, [-12, 12, 4], style1,
     'Difference \nperceived spacing (cm)', 'Odd', 'A'),
    (2, exp2.square_start_diff_sp, exp2.square_end_diff_sp, [-12, 12, 4], style2,
     None, 'Rectangular', None),
    (3, exp2.odd_start_diff_own, exp2.odd_end_diff_own, [-6, 4, 2], style3,
     'Difference ownership', 'Odd', 'B'),
    (4, exp2.square_start_diff_own, exp2.square_end_diff_own, [-6, 4, 2], style4,
     None, 'Rectangular', None),
]
axes_made = []
for pos, start_col, end_col, yticks, panel_style, ylabel, title, letter in panels:
    data = [list(start_col), list(end_col)]
    ax = fig.add_subplot(2, 2, pos)
    extra = {'yticks': yticks, 'style': panel_style}
    if ylabel is not None:
        extra['ylabel'] = ylabel
    cumming_plot.paired(data, ax, **dict(shared_kwargs, **extra))
    if letter is not None:
        # Panel letter outside the axes (top-left).
        plt.text(-.2, 1.15, letter,
                 horizontalalignment='center',
                 fontsize=letterfontsize,
                 transform=ax.transAxes)
    # Condition title centred above the axes.
    plt.text(0.5, 1.08, title,
             horizontalalignment='center',
             fontsize=font_size,
             transform=ax.transAxes)
    axes_made.append(ax)
ax1, ax2, ax3, ax4 = axes_made
# Adjust spacing of subplots
left = 0.15
right = 0.9
bottom = 0.05
top = 0.92
wspace = 0.5  # the amount of width reserved for blank space between subplots
hspace = 0.5  # the amount of height reserved for white space between subplots
fig.subplots_adjust(left=left, bottom=bottom, right=right, top=top, wspace=wspace, hspace=hspace)
#plt.savefig('figure8.pdf', format='pdf', dpi=600)
plt.savefig('figure8.png', format='png', dpi=600)
plt.savefig('figure8.svg', format='svg')
| 39.744
| 147
| 0.55626
| 3,732
| 29,808
| 4.201233
| 0.060021
| 0.026022
| 0.022961
| 0.013266
| 0.918681
| 0.871931
| 0.839275
| 0.778302
| 0.708272
| 0.704956
| 0
| 0.049752
| 0.296699
| 29,808
| 749
| 148
| 39.797063
| 0.698149
| 0.060386
| 0
| 0.821485
| 0
| 0.004739
| 0.106355
| 0.001515
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.006319
| 0
| 0.006319
| 0.004739
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b158536d0e6767e349d6d5c7bca179a35556d9fd
| 100
|
py
|
Python
|
examples/src/dbnd_examples/orchestration/dbnd_spark/scripts/__init__.py
|
busunkim96/dbnd
|
0191fdcd4c4fbd35006f1026d1a55b2abab9097b
|
[
"Apache-2.0"
] | 224
|
2020-01-02T10:46:37.000Z
|
2022-03-02T13:54:08.000Z
|
examples/src/dbnd_examples/orchestration/dbnd_spark/scripts/__init__.py
|
busunkim96/dbnd
|
0191fdcd4c4fbd35006f1026d1a55b2abab9097b
|
[
"Apache-2.0"
] | 16
|
2020-03-11T09:37:58.000Z
|
2022-01-26T10:22:08.000Z
|
examples/src/dbnd_examples/orchestration/dbnd_spark/scripts/__init__.py
|
busunkim96/dbnd
|
0191fdcd4c4fbd35006f1026d1a55b2abab9097b
|
[
"Apache-2.0"
] | 24
|
2020-03-24T13:53:50.000Z
|
2022-03-22T11:55:18.000Z
|
from dbnd import relative_path
def spark_script(*parts):
    """Build a path for a bundled Spark script by delegating to dbnd's
    relative_path with this module's ``__file__`` (presumably resolving
    *parts* relative to this package directory — confirm in dbnd docs)."""
    return relative_path(__file__, *parts)
| 16.666667
| 41
| 0.77
| 14
| 100
| 5
| 0.714286
| 0.342857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 100
| 5
| 42
| 20
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
b183cc2ea89313fa1651a7fbaae41d22a3294d49
| 42,903
|
py
|
Python
|
sdk/python/pulumi_openstack/networking/floating_ip.py
|
pulumi/pulumi-openstack
|
945eed22a82784e9f0b3aa56168b2397c2f503e8
|
[
"ECL-2.0",
"Apache-2.0"
] | 34
|
2018-09-12T12:37:51.000Z
|
2022-02-04T19:32:13.000Z
|
sdk/python/pulumi_openstack/networking/floating_ip.py
|
pulumi/pulumi-openstack
|
945eed22a82784e9f0b3aa56168b2397c2f503e8
|
[
"ECL-2.0",
"Apache-2.0"
] | 72
|
2018-08-15T13:04:57.000Z
|
2022-03-31T15:39:49.000Z
|
sdk/python/pulumi_openstack/networking/floating_ip.py
|
pulumi/pulumi-openstack
|
945eed22a82784e9f0b3aa56168b2397c2f503e8
|
[
"ECL-2.0",
"Apache-2.0"
] | 7
|
2019-03-14T08:28:49.000Z
|
2021-12-29T04:23:55.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['FloatingIpArgs', 'FloatingIp']
@pulumi.input_type
class FloatingIpArgs:
def __init__(__self__, *,
pool: pulumi.Input[str],
address: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
dns_domain: Optional[pulumi.Input[str]] = None,
dns_name: Optional[pulumi.Input[str]] = None,
fixed_ip: Optional[pulumi.Input[str]] = None,
port_id: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
subnet_id: Optional[pulumi.Input[str]] = None,
subnet_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
tenant_id: Optional[pulumi.Input[str]] = None,
value_specs: Optional[pulumi.Input[Mapping[str, Any]]] = None):
"""
The set of arguments for constructing a FloatingIp resource.
:param pulumi.Input[str] pool: The name of the pool from which to obtain the floating
IP. Changing this creates a new floating IP.
:param pulumi.Input[str] address: The actual/specific floating IP to obtain. By default,
non-admin users are not able to specify a floating IP, so you must either be
an admin user or have had a custom policy or role applied to your OpenStack
user or project.
:param pulumi.Input[str] description: Human-readable description for the floating IP.
:param pulumi.Input[str] dns_domain: The floating IP DNS domain. Available, when Neutron
DNS extension is enabled. The data in this attribute will be published in an
external DNS service when Neutron is configured to integrate with such a
service. Changing this creates a new floating IP.
:param pulumi.Input[str] dns_name: The floating IP DNS name. Available, when Neutron DNS
extension is enabled. The data in this attribute will be published in an
external DNS service when Neutron is configured to integrate with such a
service. Changing this creates a new floating IP.
:param pulumi.Input[str] fixed_ip: Fixed IP of the port to associate with this floating IP. Required if
the port has multiple fixed IPs.
:param pulumi.Input[str] port_id: ID of an existing port with at least one IP address to
associate with this floating IP.
:param pulumi.Input[str] region: The region in which to obtain the V2 Networking client.
A Networking client is needed to create a floating IP that can be used with
another networking resource, such as a load balancer. If omitted, the
`region` argument of the provider is used. Changing this creates a new
floating IP (which may or may not have a different address).
:param pulumi.Input[str] subnet_id: The subnet ID of the floating IP pool. Specify this if
the floating IP network has multiple subnets.
:param pulumi.Input[Sequence[pulumi.Input[str]]] subnet_ids: A list of external subnet IDs to try over each to
allocate a floating IP address. If a subnet ID in a list has exhausted
floating IP pool, the next subnet ID will be tried. This argument is used only
during the resource creation. Conflicts with a `subnet_id` argument.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A set of string tags for the floating IP.
:param pulumi.Input[str] tenant_id: The target tenant ID in which to allocate the floating
IP, if you specify this together with a port_id, make sure the target port
belongs to the same tenant. Changing this creates a new floating IP (which
may or may not have a different address)
:param pulumi.Input[Mapping[str, Any]] value_specs: Map of additional options.
"""
pulumi.set(__self__, "pool", pool)
if address is not None:
pulumi.set(__self__, "address", address)
if description is not None:
pulumi.set(__self__, "description", description)
if dns_domain is not None:
pulumi.set(__self__, "dns_domain", dns_domain)
if dns_name is not None:
pulumi.set(__self__, "dns_name", dns_name)
if fixed_ip is not None:
pulumi.set(__self__, "fixed_ip", fixed_ip)
if port_id is not None:
pulumi.set(__self__, "port_id", port_id)
if region is not None:
pulumi.set(__self__, "region", region)
if subnet_id is not None:
pulumi.set(__self__, "subnet_id", subnet_id)
if subnet_ids is not None:
pulumi.set(__self__, "subnet_ids", subnet_ids)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if tenant_id is not None:
pulumi.set(__self__, "tenant_id", tenant_id)
if value_specs is not None:
pulumi.set(__self__, "value_specs", value_specs)
@property
@pulumi.getter
def pool(self) -> pulumi.Input[str]:
"""
The name of the pool from which to obtain the floating
IP. Changing this creates a new floating IP.
"""
return pulumi.get(self, "pool")
@pool.setter
def pool(self, value: pulumi.Input[str]):
pulumi.set(self, "pool", value)
@property
@pulumi.getter
def address(self) -> Optional[pulumi.Input[str]]:
"""
The actual/specific floating IP to obtain. By default,
non-admin users are not able to specify a floating IP, so you must either be
an admin user or have had a custom policy or role applied to your OpenStack
user or project.
"""
return pulumi.get(self, "address")
@address.setter
def address(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "address", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
Human-readable description for the floating IP.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="dnsDomain")
def dns_domain(self) -> Optional[pulumi.Input[str]]:
"""
The floating IP DNS domain. Available, when Neutron
DNS extension is enabled. The data in this attribute will be published in an
external DNS service when Neutron is configured to integrate with such a
service. Changing this creates a new floating IP.
"""
return pulumi.get(self, "dns_domain")
@dns_domain.setter
def dns_domain(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "dns_domain", value)
    @property
    @pulumi.getter(name="dnsName")
    def dns_name(self) -> Optional[pulumi.Input[str]]:
        """
        The floating IP DNS name. Available, when Neutron DNS
        extension is enabled. The data in this attribute will be published in an
        external DNS service when Neutron is configured to integrate with such a
        service. Changing this creates a new floating IP.
        """
        return pulumi.get(self, "dns_name")

    @dns_name.setter
    def dns_name(self, value: Optional[pulumi.Input[str]]) -> None:
        """Set the DNS name (requires the Neutron DNS extension)."""
        pulumi.set(self, "dns_name", value)
    @property
    @pulumi.getter(name="fixedIp")
    def fixed_ip(self) -> Optional[pulumi.Input[str]]:
        """
        Fixed IP of the port to associate with this floating IP. Required if
        the port has multiple fixed IPs.
        """
        return pulumi.get(self, "fixed_ip")

    @fixed_ip.setter
    def fixed_ip(self, value: Optional[pulumi.Input[str]]) -> None:
        """Set the fixed IP of the associated port."""
        pulumi.set(self, "fixed_ip", value)
    @property
    @pulumi.getter(name="portId")
    def port_id(self) -> Optional[pulumi.Input[str]]:
        """
        ID of an existing port with at least one IP address to
        associate with this floating IP.
        """
        return pulumi.get(self, "port_id")

    @port_id.setter
    def port_id(self, value: Optional[pulumi.Input[str]]) -> None:
        """Set the ID of the port to associate with the floating IP."""
        pulumi.set(self, "port_id", value)
    @property
    @pulumi.getter
    def region(self) -> Optional[pulumi.Input[str]]:
        """
        The region in which to obtain the V2 Networking client.
        A Networking client is needed to create a floating IP that can be used with
        another networking resource, such as a load balancer. If omitted, the
        `region` argument of the provider is used. Changing this creates a new
        floating IP (which may or may not have a different address).
        """
        return pulumi.get(self, "region")

    @region.setter
    def region(self, value: Optional[pulumi.Input[str]]) -> None:
        """Set the region used to obtain the V2 Networking client."""
        pulumi.set(self, "region", value)
    @property
    @pulumi.getter(name="subnetId")
    def subnet_id(self) -> Optional[pulumi.Input[str]]:
        """
        The subnet ID of the floating IP pool. Specify this if
        the floating IP network has multiple subnets.
        """
        return pulumi.get(self, "subnet_id")

    @subnet_id.setter
    def subnet_id(self, value: Optional[pulumi.Input[str]]) -> None:
        """Set the subnet ID of the floating IP pool (conflicts with `subnet_ids`)."""
        pulumi.set(self, "subnet_id", value)
    @property
    @pulumi.getter(name="subnetIds")
    def subnet_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of external subnet IDs to try over each to
        allocate a floating IP address. If a subnet ID in a list has exhausted
        floating IP pool, the next subnet ID will be tried. This argument is used only
        during the resource creation. Conflicts with a `subnet_id` argument.
        """
        return pulumi.get(self, "subnet_ids")

    @subnet_ids.setter
    def subnet_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]) -> None:
        """Set the ordered list of candidate subnet IDs (conflicts with `subnet_id`)."""
        pulumi.set(self, "subnet_ids", value)
    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A set of string tags for the floating IP.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]) -> None:
        """Set the string tags for the floating IP."""
        pulumi.set(self, "tags", value)
    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> Optional[pulumi.Input[str]]:
        """
        The target tenant ID in which to allocate the floating
        IP, if you specify this together with a port_id, make sure the target port
        belongs to the same tenant. Changing this creates a new floating IP (which
        may or may not have a different address)
        """
        return pulumi.get(self, "tenant_id")

    @tenant_id.setter
    def tenant_id(self, value: Optional[pulumi.Input[str]]) -> None:
        """Set the target tenant ID for the allocation."""
        pulumi.set(self, "tenant_id", value)
    @property
    @pulumi.getter(name="valueSpecs")
    def value_specs(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        Map of additional options.
        """
        return pulumi.get(self, "value_specs")

    @value_specs.setter
    def value_specs(self, value: Optional[pulumi.Input[Mapping[str, Any]]]) -> None:
        """Set the map of additional (provider pass-through) options."""
        pulumi.set(self, "value_specs", value)
@pulumi.input_type
class _FloatingIpState:
    """Input state used for looking up and filtering FloatingIp resources.

    Every field is optional; only the properties that were explicitly
    provided are stored (see the ``is not None`` guards in ``__init__``),
    which lets partial state be merged when ``FloatingIp.get`` is called.
    """

    def __init__(__self__, *,
                 address: Optional[pulumi.Input[str]] = None,
                 all_tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 dns_domain: Optional[pulumi.Input[str]] = None,
                 dns_name: Optional[pulumi.Input[str]] = None,
                 fixed_ip: Optional[pulumi.Input[str]] = None,
                 pool: Optional[pulumi.Input[str]] = None,
                 port_id: Optional[pulumi.Input[str]] = None,
                 region: Optional[pulumi.Input[str]] = None,
                 subnet_id: Optional[pulumi.Input[str]] = None,
                 subnet_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 tenant_id: Optional[pulumi.Input[str]] = None,
                 value_specs: Optional[pulumi.Input[Mapping[str, Any]]] = None):
        """
        Input properties used for looking up and filtering FloatingIp resources.
        :param pulumi.Input[str] address: The actual/specific floating IP to obtain. By default,
               non-admin users are not able to specify a floating IP, so you must either be
               an admin user or have had a custom policy or role applied to your OpenStack
               user or project.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] all_tags: The collection of tags assigned on the floating IP, which have
               been explicitly and implicitly added.
        :param pulumi.Input[str] description: Human-readable description for the floating IP.
        :param pulumi.Input[str] dns_domain: The floating IP DNS domain. Available, when Neutron
               DNS extension is enabled. The data in this attribute will be published in an
               external DNS service when Neutron is configured to integrate with such a
               service. Changing this creates a new floating IP.
        :param pulumi.Input[str] dns_name: The floating IP DNS name. Available, when Neutron DNS
               extension is enabled. The data in this attribute will be published in an
               external DNS service when Neutron is configured to integrate with such a
               service. Changing this creates a new floating IP.
        :param pulumi.Input[str] fixed_ip: Fixed IP of the port to associate with this floating IP. Required if
               the port has multiple fixed IPs.
        :param pulumi.Input[str] pool: The name of the pool from which to obtain the floating
               IP. Changing this creates a new floating IP.
        :param pulumi.Input[str] port_id: ID of an existing port with at least one IP address to
               associate with this floating IP.
        :param pulumi.Input[str] region: The region in which to obtain the V2 Networking client.
               A Networking client is needed to create a floating IP that can be used with
               another networking resource, such as a load balancer. If omitted, the
               `region` argument of the provider is used. Changing this creates a new
               floating IP (which may or may not have a different address).
        :param pulumi.Input[str] subnet_id: The subnet ID of the floating IP pool. Specify this if
               the floating IP network has multiple subnets.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] subnet_ids: A list of external subnet IDs to try over each to
               allocate a floating IP address. If a subnet ID in a list has exhausted
               floating IP pool, the next subnet ID will be tried. This argument is used only
               during the resource creation. Conflicts with a `subnet_id` argument.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A set of string tags for the floating IP.
        :param pulumi.Input[str] tenant_id: The target tenant ID in which to allocate the floating
               IP, if you specify this together with a port_id, make sure the target port
               belongs to the same tenant. Changing this creates a new floating IP (which
               may or may not have a different address)
        :param pulumi.Input[Mapping[str, Any]] value_specs: Map of additional options.
        """
        # Only record the fields the caller actually supplied; unset fields
        # stay absent so they do not participate in the state lookup.
        if address is not None:
            pulumi.set(__self__, "address", address)
        if all_tags is not None:
            pulumi.set(__self__, "all_tags", all_tags)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if dns_domain is not None:
            pulumi.set(__self__, "dns_domain", dns_domain)
        if dns_name is not None:
            pulumi.set(__self__, "dns_name", dns_name)
        if fixed_ip is not None:
            pulumi.set(__self__, "fixed_ip", fixed_ip)
        if pool is not None:
            pulumi.set(__self__, "pool", pool)
        if port_id is not None:
            pulumi.set(__self__, "port_id", port_id)
        if region is not None:
            pulumi.set(__self__, "region", region)
        if subnet_id is not None:
            pulumi.set(__self__, "subnet_id", subnet_id)
        if subnet_ids is not None:
            pulumi.set(__self__, "subnet_ids", subnet_ids)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if tenant_id is not None:
            pulumi.set(__self__, "tenant_id", tenant_id)
        if value_specs is not None:
            pulumi.set(__self__, "value_specs", value_specs)

    @property
    @pulumi.getter
    def address(self) -> Optional[pulumi.Input[str]]:
        """
        The actual/specific floating IP to obtain. By default,
        non-admin users are not able to specify a floating IP, so you must either be
        an admin user or have had a custom policy or role applied to your OpenStack
        user or project.
        """
        return pulumi.get(self, "address")

    @address.setter
    def address(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "address", value)

    @property
    @pulumi.getter(name="allTags")
    def all_tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The collection of tags assigned on the floating IP, which have
        been explicitly and implicitly added.
        """
        return pulumi.get(self, "all_tags")

    @all_tags.setter
    def all_tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "all_tags", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        Human-readable description for the floating IP.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="dnsDomain")
    def dns_domain(self) -> Optional[pulumi.Input[str]]:
        """
        The floating IP DNS domain. Available, when Neutron
        DNS extension is enabled. The data in this attribute will be published in an
        external DNS service when Neutron is configured to integrate with such a
        service. Changing this creates a new floating IP.
        """
        return pulumi.get(self, "dns_domain")

    @dns_domain.setter
    def dns_domain(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "dns_domain", value)

    @property
    @pulumi.getter(name="dnsName")
    def dns_name(self) -> Optional[pulumi.Input[str]]:
        """
        The floating IP DNS name. Available, when Neutron DNS
        extension is enabled. The data in this attribute will be published in an
        external DNS service when Neutron is configured to integrate with such a
        service. Changing this creates a new floating IP.
        """
        return pulumi.get(self, "dns_name")

    @dns_name.setter
    def dns_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "dns_name", value)

    @property
    @pulumi.getter(name="fixedIp")
    def fixed_ip(self) -> Optional[pulumi.Input[str]]:
        """
        Fixed IP of the port to associate with this floating IP. Required if
        the port has multiple fixed IPs.
        """
        return pulumi.get(self, "fixed_ip")

    @fixed_ip.setter
    def fixed_ip(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "fixed_ip", value)

    @property
    @pulumi.getter
    def pool(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the pool from which to obtain the floating
        IP. Changing this creates a new floating IP.
        """
        return pulumi.get(self, "pool")

    @pool.setter
    def pool(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "pool", value)

    @property
    @pulumi.getter(name="portId")
    def port_id(self) -> Optional[pulumi.Input[str]]:
        """
        ID of an existing port with at least one IP address to
        associate with this floating IP.
        """
        return pulumi.get(self, "port_id")

    @port_id.setter
    def port_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "port_id", value)

    @property
    @pulumi.getter
    def region(self) -> Optional[pulumi.Input[str]]:
        """
        The region in which to obtain the V2 Networking client.
        A Networking client is needed to create a floating IP that can be used with
        another networking resource, such as a load balancer. If omitted, the
        `region` argument of the provider is used. Changing this creates a new
        floating IP (which may or may not have a different address).
        """
        return pulumi.get(self, "region")

    @region.setter
    def region(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "region", value)

    @property
    @pulumi.getter(name="subnetId")
    def subnet_id(self) -> Optional[pulumi.Input[str]]:
        """
        The subnet ID of the floating IP pool. Specify this if
        the floating IP network has multiple subnets.
        """
        return pulumi.get(self, "subnet_id")

    @subnet_id.setter
    def subnet_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "subnet_id", value)

    @property
    @pulumi.getter(name="subnetIds")
    def subnet_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of external subnet IDs to try over each to
        allocate a floating IP address. If a subnet ID in a list has exhausted
        floating IP pool, the next subnet ID will be tried. This argument is used only
        during the resource creation. Conflicts with a `subnet_id` argument.
        """
        return pulumi.get(self, "subnet_ids")

    @subnet_ids.setter
    def subnet_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "subnet_ids", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A set of string tags for the floating IP.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> Optional[pulumi.Input[str]]:
        """
        The target tenant ID in which to allocate the floating
        IP, if you specify this together with a port_id, make sure the target port
        belongs to the same tenant. Changing this creates a new floating IP (which
        may or may not have a different address)
        """
        return pulumi.get(self, "tenant_id")

    @tenant_id.setter
    def tenant_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "tenant_id", value)

    @property
    @pulumi.getter(name="valueSpecs")
    def value_specs(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        Map of additional options.
        """
        return pulumi.get(self, "value_specs")

    @value_specs.setter
    def value_specs(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "value_specs", value)
class FloatingIp(pulumi.CustomResource):
    """An OpenStack Neutron floating IP resource
    (`openstack:networking/floatingIp:FloatingIp`).

    Construct with either keyword arguments or a ``FloatingIpArgs`` object;
    the typed ``__init__`` overloads below are for IDEs/type checkers only,
    and the real dispatch happens in the untyped ``__init__``.
    """

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 address: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 dns_domain: Optional[pulumi.Input[str]] = None,
                 dns_name: Optional[pulumi.Input[str]] = None,
                 fixed_ip: Optional[pulumi.Input[str]] = None,
                 pool: Optional[pulumi.Input[str]] = None,
                 port_id: Optional[pulumi.Input[str]] = None,
                 region: Optional[pulumi.Input[str]] = None,
                 subnet_id: Optional[pulumi.Input[str]] = None,
                 subnet_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 tenant_id: Optional[pulumi.Input[str]] = None,
                 value_specs: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 __props__=None):
        """
        ## Import
        Floating IPs can be imported using the `id`, e.g.
        ```sh
        $ pulumi import openstack:networking/floatingIp:FloatingIp floatip_1 2c7f39f3-702b-48d1-940c-b50384177ee1
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] address: The actual/specific floating IP to obtain. By default,
               non-admin users are not able to specify a floating IP, so you must either be
               an admin user or have had a custom policy or role applied to your OpenStack
               user or project.
        :param pulumi.Input[str] description: Human-readable description for the floating IP.
        :param pulumi.Input[str] dns_domain: The floating IP DNS domain. Available, when Neutron
               DNS extension is enabled. The data in this attribute will be published in an
               external DNS service when Neutron is configured to integrate with such a
               service. Changing this creates a new floating IP.
        :param pulumi.Input[str] dns_name: The floating IP DNS name. Available, when Neutron DNS
               extension is enabled. The data in this attribute will be published in an
               external DNS service when Neutron is configured to integrate with such a
               service. Changing this creates a new floating IP.
        :param pulumi.Input[str] fixed_ip: Fixed IP of the port to associate with this floating IP. Required if
               the port has multiple fixed IPs.
        :param pulumi.Input[str] pool: The name of the pool from which to obtain the floating
               IP. Changing this creates a new floating IP.
        :param pulumi.Input[str] port_id: ID of an existing port with at least one IP address to
               associate with this floating IP.
        :param pulumi.Input[str] region: The region in which to obtain the V2 Networking client.
               A Networking client is needed to create a floating IP that can be used with
               another networking resource, such as a load balancer. If omitted, the
               `region` argument of the provider is used. Changing this creates a new
               floating IP (which may or may not have a different address).
        :param pulumi.Input[str] subnet_id: The subnet ID of the floating IP pool. Specify this if
               the floating IP network has multiple subnets.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] subnet_ids: A list of external subnet IDs to try over each to
               allocate a floating IP address. If a subnet ID in a list has exhausted
               floating IP pool, the next subnet ID will be tried. This argument is used only
               during the resource creation. Conflicts with a `subnet_id` argument.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A set of string tags for the floating IP.
        :param pulumi.Input[str] tenant_id: The target tenant ID in which to allocate the floating
               IP, if you specify this together with a port_id, make sure the target port
               belongs to the same tenant. Changing this creates a new floating IP (which
               may or may not have a different address)
        :param pulumi.Input[Mapping[str, Any]] value_specs: Map of additional options.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: FloatingIpArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        ## Import
        Floating IPs can be imported using the `id`, e.g.
        ```sh
        $ pulumi import openstack:networking/floatingIp:FloatingIp floatip_1 2c7f39f3-702b-48d1-940c-b50384177ee1
        ```
        :param str resource_name: The name of the resource.
        :param FloatingIpArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the (args, opts) and keyword-argument call forms.
        resource_args, opts = _utilities.get_resource_args_opts(FloatingIpArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       address: Optional[pulumi.Input[str]] = None,
                       description: Optional[pulumi.Input[str]] = None,
                       dns_domain: Optional[pulumi.Input[str]] = None,
                       dns_name: Optional[pulumi.Input[str]] = None,
                       fixed_ip: Optional[pulumi.Input[str]] = None,
                       pool: Optional[pulumi.Input[str]] = None,
                       port_id: Optional[pulumi.Input[str]] = None,
                       region: Optional[pulumi.Input[str]] = None,
                       subnet_id: Optional[pulumi.Input[str]] = None,
                       subnet_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       tenant_id: Optional[pulumi.Input[str]] = None,
                       value_specs: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                       __props__=None):
        # Normalize resource options and stamp the plugin version.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # When opts.id is set, the engine is reconstructing an existing
        # resource and __props__ carries its state; otherwise build the
        # input property bag from the call arguments.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = FloatingIpArgs.__new__(FloatingIpArgs)
            __props__.__dict__["address"] = address
            __props__.__dict__["description"] = description
            __props__.__dict__["dns_domain"] = dns_domain
            __props__.__dict__["dns_name"] = dns_name
            __props__.__dict__["fixed_ip"] = fixed_ip
            # `pool` is the only required input; the check is skipped when a
            # URN is supplied because the value then comes from saved state.
            if pool is None and not opts.urn:
                raise TypeError("Missing required property 'pool'")
            __props__.__dict__["pool"] = pool
            __props__.__dict__["port_id"] = port_id
            __props__.__dict__["region"] = region
            __props__.__dict__["subnet_id"] = subnet_id
            __props__.__dict__["subnet_ids"] = subnet_ids
            __props__.__dict__["tags"] = tags
            __props__.__dict__["tenant_id"] = tenant_id
            __props__.__dict__["value_specs"] = value_specs
            # Output-only property; populated by the provider after creation.
            __props__.__dict__["all_tags"] = None
        super(FloatingIp, __self__).__init__(
            'openstack:networking/floatingIp:FloatingIp',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            address: Optional[pulumi.Input[str]] = None,
            all_tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            description: Optional[pulumi.Input[str]] = None,
            dns_domain: Optional[pulumi.Input[str]] = None,
            dns_name: Optional[pulumi.Input[str]] = None,
            fixed_ip: Optional[pulumi.Input[str]] = None,
            pool: Optional[pulumi.Input[str]] = None,
            port_id: Optional[pulumi.Input[str]] = None,
            region: Optional[pulumi.Input[str]] = None,
            subnet_id: Optional[pulumi.Input[str]] = None,
            subnet_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            tenant_id: Optional[pulumi.Input[str]] = None,
            value_specs: Optional[pulumi.Input[Mapping[str, Any]]] = None) -> 'FloatingIp':
        """
        Get an existing FloatingIp resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] address: The actual/specific floating IP to obtain. By default,
               non-admin users are not able to specify a floating IP, so you must either be
               an admin user or have had a custom policy or role applied to your OpenStack
               user or project.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] all_tags: The collection of tags assigned on the floating IP, which have
               been explicitly and implicitly added.
        :param pulumi.Input[str] description: Human-readable description for the floating IP.
        :param pulumi.Input[str] dns_domain: The floating IP DNS domain. Available, when Neutron
               DNS extension is enabled. The data in this attribute will be published in an
               external DNS service when Neutron is configured to integrate with such a
               service. Changing this creates a new floating IP.
        :param pulumi.Input[str] dns_name: The floating IP DNS name. Available, when Neutron DNS
               extension is enabled. The data in this attribute will be published in an
               external DNS service when Neutron is configured to integrate with such a
               service. Changing this creates a new floating IP.
        :param pulumi.Input[str] fixed_ip: Fixed IP of the port to associate with this floating IP. Required if
               the port has multiple fixed IPs.
        :param pulumi.Input[str] pool: The name of the pool from which to obtain the floating
               IP. Changing this creates a new floating IP.
        :param pulumi.Input[str] port_id: ID of an existing port with at least one IP address to
               associate with this floating IP.
        :param pulumi.Input[str] region: The region in which to obtain the V2 Networking client.
               A Networking client is needed to create a floating IP that can be used with
               another networking resource, such as a load balancer. If omitted, the
               `region` argument of the provider is used. Changing this creates a new
               floating IP (which may or may not have a different address).
        :param pulumi.Input[str] subnet_id: The subnet ID of the floating IP pool. Specify this if
               the floating IP network has multiple subnets.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] subnet_ids: A list of external subnet IDs to try over each to
               allocate a floating IP address. If a subnet ID in a list has exhausted
               floating IP pool, the next subnet ID will be tried. This argument is used only
               during the resource creation. Conflicts with a `subnet_id` argument.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A set of string tags for the floating IP.
        :param pulumi.Input[str] tenant_id: The target tenant ID in which to allocate the floating
               IP, if you specify this together with a port_id, make sure the target port
               belongs to the same tenant. Changing this creates a new floating IP (which
               may or may not have a different address)
        :param pulumi.Input[Mapping[str, Any]] value_specs: Map of additional options.
        """
        # Merge the provider ID into the options so the engine performs a
        # read of the existing resource instead of creating a new one.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _FloatingIpState.__new__(_FloatingIpState)
        __props__.__dict__["address"] = address
        __props__.__dict__["all_tags"] = all_tags
        __props__.__dict__["description"] = description
        __props__.__dict__["dns_domain"] = dns_domain
        __props__.__dict__["dns_name"] = dns_name
        __props__.__dict__["fixed_ip"] = fixed_ip
        __props__.__dict__["pool"] = pool
        __props__.__dict__["port_id"] = port_id
        __props__.__dict__["region"] = region
        __props__.__dict__["subnet_id"] = subnet_id
        __props__.__dict__["subnet_ids"] = subnet_ids
        __props__.__dict__["tags"] = tags
        __props__.__dict__["tenant_id"] = tenant_id
        __props__.__dict__["value_specs"] = value_specs
        return FloatingIp(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter
    def address(self) -> pulumi.Output[str]:
        """
        The actual/specific floating IP to obtain. By default,
        non-admin users are not able to specify a floating IP, so you must either be
        an admin user or have had a custom policy or role applied to your OpenStack
        user or project.
        """
        return pulumi.get(self, "address")

    @property
    @pulumi.getter(name="allTags")
    def all_tags(self) -> pulumi.Output[Sequence[str]]:
        """
        The collection of tags assigned on the floating IP, which have
        been explicitly and implicitly added.
        """
        return pulumi.get(self, "all_tags")

    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[Optional[str]]:
        """
        Human-readable description for the floating IP.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter(name="dnsDomain")
    def dns_domain(self) -> pulumi.Output[str]:
        """
        The floating IP DNS domain. Available, when Neutron
        DNS extension is enabled. The data in this attribute will be published in an
        external DNS service when Neutron is configured to integrate with such a
        service. Changing this creates a new floating IP.
        """
        return pulumi.get(self, "dns_domain")

    @property
    @pulumi.getter(name="dnsName")
    def dns_name(self) -> pulumi.Output[str]:
        """
        The floating IP DNS name. Available, when Neutron DNS
        extension is enabled. The data in this attribute will be published in an
        external DNS service when Neutron is configured to integrate with such a
        service. Changing this creates a new floating IP.
        """
        return pulumi.get(self, "dns_name")

    @property
    @pulumi.getter(name="fixedIp")
    def fixed_ip(self) -> pulumi.Output[str]:
        """
        Fixed IP of the port to associate with this floating IP. Required if
        the port has multiple fixed IPs.
        """
        return pulumi.get(self, "fixed_ip")

    @property
    @pulumi.getter
    def pool(self) -> pulumi.Output[str]:
        """
        The name of the pool from which to obtain the floating
        IP. Changing this creates a new floating IP.
        """
        return pulumi.get(self, "pool")

    @property
    @pulumi.getter(name="portId")
    def port_id(self) -> pulumi.Output[str]:
        """
        ID of an existing port with at least one IP address to
        associate with this floating IP.
        """
        return pulumi.get(self, "port_id")

    @property
    @pulumi.getter
    def region(self) -> pulumi.Output[str]:
        """
        The region in which to obtain the V2 Networking client.
        A Networking client is needed to create a floating IP that can be used with
        another networking resource, such as a load balancer. If omitted, the
        `region` argument of the provider is used. Changing this creates a new
        floating IP (which may or may not have a different address).
        """
        return pulumi.get(self, "region")

    @property
    @pulumi.getter(name="subnetId")
    def subnet_id(self) -> pulumi.Output[str]:
        """
        The subnet ID of the floating IP pool. Specify this if
        the floating IP network has multiple subnets.
        """
        return pulumi.get(self, "subnet_id")

    @property
    @pulumi.getter(name="subnetIds")
    def subnet_ids(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        A list of external subnet IDs to try over each to
        allocate a floating IP address. If a subnet ID in a list has exhausted
        floating IP pool, the next subnet ID will be tried. This argument is used only
        during the resource creation. Conflicts with a `subnet_id` argument.
        """
        return pulumi.get(self, "subnet_ids")

    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        A set of string tags for the floating IP.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> pulumi.Output[str]:
        """
        The target tenant ID in which to allocate the floating
        IP, if you specify this together with a port_id, make sure the target port
        belongs to the same tenant. Changing this creates a new floating IP (which
        may or may not have a different address)
        """
        return pulumi.get(self, "tenant_id")

    @property
    @pulumi.getter(name="valueSpecs")
    def value_specs(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:
        """
        Map of additional options.
        """
        return pulumi.get(self, "value_specs")
| 46.735294
| 134
| 0.637624
| 5,646
| 42,903
| 4.710946
| 0.044279
| 0.087262
| 0.086322
| 0.07196
| 0.934356
| 0.926197
| 0.915708
| 0.907286
| 0.904467
| 0.885969
| 0
| 0.001801
| 0.275226
| 42,903
| 917
| 135
| 46.78626
| 0.853578
| 0.446659
| 0
| 0.845133
| 1
| 0
| 0.069986
| 0.00202
| 0
| 0
| 0
| 0
| 0
| 1
| 0.165929
| false
| 0.002212
| 0.011062
| 0
| 0.276549
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b18717bcc6f9f01b5a98da3a742d40ea35412748
| 16,089
|
py
|
Python
|
test/test_kas_pymoo.py
|
ryanstwrt/multi_agent_blackboard_system
|
b8f6ab71dfe0742a6f690de19b97d10504fc1768
|
[
"MIT"
] | 1
|
2021-08-02T10:29:35.000Z
|
2021-08-02T10:29:35.000Z
|
test/test_kas_pymoo.py
|
ryanstwrt/multi_agent_blackboard_system
|
b8f6ab71dfe0742a6f690de19b97d10504fc1768
|
[
"MIT"
] | 10
|
2020-03-14T07:39:34.000Z
|
2021-11-03T22:55:28.000Z
|
test/test_kas_pymoo.py
|
ryanstwrt/multi_agent_blackboard_system
|
b8f6ab71dfe0742a6f690de19b97d10504fc1768
|
[
"MIT"
] | 1
|
2021-07-18T14:43:10.000Z
|
2021-07-18T14:43:10.000Z
|
from osbrain import run_nameserver
from osbrain import run_agent
import time
import mabs.ka.ka_s.pymoo_plugin as pm
import mabs.bb.blackboard_optimization as bb_opt
from pymoo.factory import get_algorithm, get_termination
from mabs.utils.problem import BenchmarkProblem
import numpy as np
def test_init():
    """Verify the default attributes of a freshly constructed PyMooAlgorithm agent.

    Fix: comparisons against ``None`` used ``==`` (PEP 8 E711); singletons
    must be compared with ``is``.
    """
    # The osBrain name server can transiently fail to start (e.g. the socket
    # from a previous test is not yet released); retry once after a pause.
    try:
        ns = run_nameserver()
    except OSError:
        time.sleep(0.5)
        ns = run_nameserver()

    ka_s = run_agent(name='ka_pymoo', base=pm.PyMooAlgorithm)
    assert ka_s.get_attr('pymoo_algorithm_name') == 'nsga2'
    assert ka_s.get_attr('crossover') == 'real_sbx'
    assert ka_s.get_attr('mutation') == 'real_pm'
    assert ka_s.get_attr('_class') == 'local search pymoo nsga2'
    assert ka_s.get_attr('termination_type') == 'n_eval'
    assert ka_s.get_attr('termination_criteria') == 250
    assert ka_s.get_attr('termination') is None
    assert ka_s.get_attr('pop_size') == 25
    assert ka_s.get_attr('n_offspring') == 10
    assert ka_s.get_attr('initial_pop') is None

    ns.shutdown()
    # Give the name server a moment to release its socket before the next test.
    time.sleep(0.1)
def test_setup_mixed():
    """Exercise setup_problem() on a mixed discrete ('options') / continuous design space.

    NOTE(review): the `.all() == .all()` assertions below collapse each array to a
    single scalar truth value before comparing, so they pass for almost any pair
    of non-zero arrays — they do not actually verify array contents.
    """
    # A name server may linger from a previous test run; retry once after a short wait.
    try:
        ns = run_nameserver()
    except OSError:
        time.sleep(0.5)
        ns = run_nameserver()
    ka_s = run_agent(name='ka_pymoo', base=pm.PyMooAlgorithm)
    objs = {'f0': {'ll':0.0, 'ul':500.0, 'goal':'lt', 'variable type': float},
            'f1': {'ll':0.0, 'ul':50.0, 'goal':'lt', 'variable type': float},}
    # NOTE(review): '3,10' inside the options list below is probably a typo for
    # '3.10' — as written it contributes two separate entries (3 and 10).
    # Confirm against the upper bound (77) asserted on problem.xu further down.
    dvs = {'x0': {'options' : [0.20, 0.31, 0.40, 0.44, 0.60, 0.62, 0.79, 0.80, 0.88, 0.93, 1.0, 1.20, 1.24, 1.32, 1.40, 1.55, 1.58, 1.60, 1.76, 1.80, 1.86, 2.0, 2.17, 2.20, 2.37, 2.40, 2.48, 2.60, 2.64, 2.79, 2.80, 3.0, 3.08, 3,10, 3.16, 3.41, 3.52, 3.60, 3.72, 3.95, 3.96, 4.0, 4.03, 4.20, 4.34, 4.40, 4.65, 4.74, 4.80, 4.84, 5.0, 5.28, 5.40, 5.53, 5.72, 6.0, 6.16, 6.32, 6.60, 7.11, 7.20, 7.80, 7.90, 8.0, 8.40, 8.69, 9.0, 9.48, 10.27, 11.0, 11.06, 11.85, 12.0, 13.0, 14.0, 15.0], 'variable type': float},
           'x1': {'ll': 0.0, 'ul':20.0, 'variable type': float},
           'x2': {'ll': 0.0, 'ul':40.0, 'variable type': float},}
    ka_s.set_attr(_design_variables=dvs)
    ka_s.set_attr(_objectives=objs)
    ka_s.set_attr(lvl_read = {'core_[1,10.0,10.5]': {'pareto type' : 'pareto', 'fitness function' : 1.0},
                              'core_[1,10.0,20.0]': {'pareto type' : 'pareto', 'fitness function' : 1.0}})
    ka_s.set_attr(_lvl_data = {'core_[1,10.0,10.5]': {'design variables': {'x0': 1, 'x1': 10.0, 'x2': 10.50},
                                                      'objective functions': {'f0' : 450.11, 'f1' : 35.12},
                                                      'constraints': {}},
                               'core_[1,10.0,20.0]': {'design variables': {'x0': 1, 'x1': 10.0, 'x2': 20.0},
                                                      'objective functions': {'f0' : 310.11,'f1' : 25.12},
                                                      'constraints': {}}})
    # Defaults should be untouched until setup_problem() runs.
    assert ka_s.get_attr('crossover') == 'real_sbx'
    assert ka_s.get_attr('mutation') == 'real_pm'
    assert ka_s.get_attr('_class') == 'local search pymoo nsga2'
    assert ka_s.get_attr('termination_type') == 'n_eval'
    assert ka_s.get_attr('termination_criteria') == 250
    assert ka_s.get_attr('termination') == None
    assert ka_s.get_attr('pop_size') == 25
    assert ka_s.get_attr('n_offspring') == 10
    assert ka_s.get_attr('initial_pop') == None
    ka_s.setup_problem()
    # NOTE(review): vacuous — see docstring. The expected values also look copied
    # from another test: the seeded data is [[1,10.0,10.5],[1,10.0,20.0]], not
    # [[90,80,0.5],[75,65,0.9]].
    assert np.array([[90.0,80.0,0.5], [75.0,65.0,0.9]]).all() == ka_s.get_attr('initial_pop').all()
    assert type(get_termination('n_eval', 250)) == type(ka_s.get_attr('termination'))
    problem = ka_s.get_attr('_problem')
    assert problem.n_var == 3
    assert problem.n_obj == 2
    assert problem.n_constr == 0
    # NOTE(review): vacuous `.all() == .all()` comparisons — see docstring.
    assert problem.xl.all() == np.array([0, 0.0, 0.0]).all()
    assert problem.xu.all() == np.array([77, 20.0, 40.0]).all()
    # NOTE(review): 'n_offpsring' is a misspelling of n_offspring; only the type
    # of the returned algorithm is compared, so the kwarg is effectively ignored.
    assert type(get_algorithm('nsga2', sampling=np.array([[1,10.0,10.5], [1,10.0,20.]]), pop_size=25, n_offpsring=10)) == type(ka_s.get_attr('algorithm'))
    ns.shutdown()
    time.sleep(0.1)
def test_get_pf():
    """get_pf() should assemble the design variables of every Pareto entry.

    The original assertion compared ``expected.all() == X.all()``; ``.all()``
    collapses each array to a single truth value, so that check passed for
    nearly any pair of non-zero arrays.  It is replaced with an element-wise
    ``np.array_equal`` comparison against the seeded design-variable rows.
    """
    # A name server may linger from a previous test run; retry once after a short wait.
    try:
        ns = run_nameserver()
    except OSError:
        time.sleep(0.5)
        ns = run_nameserver()
    ka_s = run_agent(name='ka_pymoo', base=pm.PyMooAlgorithm)
    ka_s.set_attr(_design_variables={'height': {'ll': 50.0, 'ul': 100.0, 'variable type': float},
                                     'smear': {'ll': 50.0, 'ul': 80.0, 'variable type': float},
                                     'pu_content': {'ll': 0.0, 'ul': 1.0, 'variable type': float}} )
    ka_s.set_attr(lvl_read= {'core_[90.0,80.0,0.5]': {'pareto type' : 'pareto', 'fitness function' : 1.0},
                             'core_[75.0,65.0,0.9]': {'pareto type' : 'pareto', 'fitness function' : 1.0}})
    ka_s.set_attr(_lvl_data= {'core_[90.0,80.0,0.5]': {'design variables': {'height': 90.0, 'smear': 80.0, 'pu_content': 0.50},
                                                       'objective functions': {'reactivity swing' : 704.11, 'burnup' : 65.12}},
                              'core_[75.0,65.0,0.9]': {'design variables': {'height': 75.0, 'smear': 65.0, 'pu_content': 0.90},
                                                       'objective functions': {'reactivity swing' : 710.11,'burnup' : 61.12}}})
    X = ka_s.get_pf()
    # Rows are expected in the insertion order of lvl_read / _lvl_data — TODO confirm
    # against get_pf()'s iteration order.
    assert np.array_equal(np.array([[90.0, 80.0, 0.5], [75.0, 65.0, 0.9]]), X)
    ns.shutdown()
    time.sleep(0.1)
def test_setup_problem():
    """setup_problem() should build the pymoo problem from the blackboard data.

    The original ``.all() == .all()`` assertions collapsed each array to a
    scalar truth value and thus never really compared contents; they are
    replaced with element-wise ``np.array_equal`` checks.
    """
    # A name server may linger from a previous test run; retry once after a short wait.
    try:
        ns = run_nameserver()
    except OSError:
        time.sleep(0.5)
        ns = run_nameserver()
    ka_s = run_agent(name='ka_pymoo', base=pm.PyMooAlgorithm)
    ka_s.set_attr(_design_variables={'height': {'ll': 50.0, 'ul': 100.0, 'variable type': float},
                                     'smear': {'ll': 50.0, 'ul': 80.0, 'variable type': float},
                                     'pu_content': {'ll': 0.0, 'ul': 1.0, 'variable type': float}})
    ka_s.set_attr(_objectives={'reactivity swing': {'ll':0, 'ul':1500, 'goal':'lt', 'variable type': float},
                               'burnup': {'ll':0, 'ul':150, 'goal':'gt', 'variable type': float}})
    ka_s.set_attr(_constraints={'excess reactivity': {'ll': 0, 'ul': 30000, 'variable type': float}})
    ka_s.set_attr(lvl_read= {'core_[90.0,80.0,0.5]': {'pareto type' : 'pareto', 'fitness function' : 1.0},
                             'core_[75.0,65.0,0.9]': {'pareto type' : 'pareto', 'fitness function' : 1.0}})
    ka_s.set_attr(_lvl_data= {'core_[90.0,80.0,0.5]': {'design variables': {'height': 90.0, 'smear': 80.0, 'pu_content': 0.50},
                                                       'objective functions': {'reactivity swing' : 704.11, 'burnup' : 65.12},
                                                       'constraints': {'excess reactivity': 2500}},
                              'core_[75.0,65.0,0.9]': {'design variables': {'height': 75.0, 'smear': 65.0, 'pu_content': 0.90},
                                                       'objective functions': {'reactivity swing' : 710.11,'burnup' : 61.12},
                                                       'constraints': {'excess reactivity': 5000}}})
    ka_s.set_attr(pop_size=2)
    ka_s.set_attr(n_pop=1)
    ka_s.setup_problem()
    # The initial population is seeded from the Pareto entries above.
    assert np.array_equal(np.array([[90.0, 80.0, 0.5], [75.0, 65.0, 0.9]]), ka_s.get_attr('initial_pop'))
    assert type(get_termination('n_eval', 250)) == type(ka_s.get_attr('termination'))
    problem = ka_s.get_attr('_problem')
    assert problem.n_var == 3
    assert problem.n_obj == 2
    assert problem.n_constr == 1
    # Bounds come straight from the design-variable 'll'/'ul' entries.
    assert np.array_equal(problem.xl, np.array([50.0, 50.0, 0.0]))
    assert np.array_equal(problem.xu, np.array([100.0, 80.0, 1.0]))
    assert problem.base.get_attr('_design_variables') == {'height': {'ll': 50.0, 'ul': 100.0, 'variable type': float},
                                                          'smear': {'ll': 50.0, 'ul': 80.0, 'variable type': float},
                                                          'pu_content': {'ll': 0.0, 'ul': 1.0, 'variable type': float}}
    # Only the type of the returned algorithm is compared; the original spelled
    # the kwarg 'n_offpsring' — corrected here, behavior is unchanged either way.
    assert type(get_algorithm('nsga2', sampling=np.array([[90.0,80.0,0.5], [75.0,65.0,0.9]]), pop_size=31, n_offspring=10)) == type(ka_s.get_attr('algorithm'))
    ns.shutdown()
    time.sleep(0.1)
def test_search_method():
    """Run the NSGA-II knowledge agent end-to-end on the DTLZ1 benchmark.

    NOTE(review): the expected core names below encode the exact RNG stream for
    seed 10893 — any change to pymoo's sampling/crossover internals will shift
    these literal values.
    """
    # A name server may linger from a previous test run; retry once after a short wait.
    try:
        ns = run_nameserver()
    except OSError:
        time.sleep(0.5)
        ns = run_nameserver()
    dvs = {'x{}'.format(x):{'ll':0.0, 'ul':1.0, 'variable type': float} for x in range(3)}
    objs = {'f{}'.format(x): {'ll':0.0, 'ul':1000, 'goal':'lt', 'variable type': float} for x in range(3)}
    problem = BenchmarkProblem(design_variables=dvs,
                               objectives=objs,
                               constraints={},
                               benchmark_name = 'dtlz1')
    bb = run_agent(name='blackboard', base=bb_opt.BbOpt)
    bb.set_attr(constraints={})
    bb.initialize_abstract_level_3(objectives=objs, design_variables=dvs, constraints={})
    bb.connect_agent(pm.PyMooAlgorithm, 'ka_nsga2')
    ka = bb.get_attr('_proxy_server')
    ka_s = ka.proxy('ka_nsga2')
    ka_s.set_attr(problem=problem)
    ka_s.set_random_seed(seed=10893)
    # Seed two Pareto entries on the blackboard for the agent to read.
    bb.update_abstract_lvl(3, 'core_[0.650,0.650,0.4]', {'design variables': {'x0': 0.650, 'x1': 0.650, 'x2': 0.4},
                                                         'objective functions': {'f0': 365.0, 'f1': 500.0, 'f2' : 600.0}}, panel='old')
    bb.update_abstract_lvl(1, 'core_[0.650,0.650,0.4]', {'pareto type' : 'pareto', 'fitness function' : 1.0})
    bb.update_abstract_lvl(3, 'core_[0.650,0.750,0.24]', {'design variables': {'x0': 0.650, 'x1': 0.750, 'x2': 0.24},
                                                          'objective functions': {'f0': 36.0, 'f1': 50.0, 'f2' : 60.0}}, panel='old')
    bb.update_abstract_lvl(1, 'core_[0.650,0.750,0.24]', {'pareto type' : 'pareto', 'fitness function' : 1.0})
    ka_s.set_attr(lvl_read=bb.get_blackboard()['level 1'])
    ka_s.set_attr(_lvl_data=bb.get_blackboard()['level 3']['old'])
    ka_s.set_attr(pop_size=2)
    ka_s.set_attr(n_pop=1)
    ka_s.set_attr(termination_criteria=6)
    ka_s.search_method()
    # NOTE(review): return value unused — looks like a leftover debug call.
    ka_s.get_attr('_class')
    assert list(bb.get_blackboard()['level 3']['new'].keys()) == ['core_[0.65,0.65,0.4]', 'core_[0.65,0.75,0.24]', 'core_[0.65,0.6559273381756285,0.4]', 'core_[0.5913069633410922,0.65,0.4]', 'core_[0.4455492956093361,0.65,0.4]', 'core_[0.5913069633410922,0.5932894680193752,0.4093256985734208]']
    ns.shutdown()
    time.sleep(0.1)
def test_search_method_mixed():
    """Run the NSGA-II agent on the 're22' benchmark with a mixed design space.

    NOTE(review): the expected core names depend on the exact RNG stream for
    seed 10893; they are compared as a set since ordering is not guaranteed.
    """
    # A name server may linger from a previous test run; retry once after a short wait.
    try:
        ns = run_nameserver()
    except OSError:
        time.sleep(0.5)
        ns = run_nameserver()
    objs = {'f0': {'ll':0.0, 'ul':500.0, 'goal':'lt', 'variable type': float},
            'f1': {'ll':0.0, 'ul':50.0, 'goal':'lt', 'variable type': float},}
    # NOTE(review): '3,10' in the options list below is probably a typo for
    # '3.10' — it contributes two separate entries (3 and 10); confirm intent.
    dvs = {'x0': {'options' : [0.20, 0.31, 0.40, 0.44, 0.60, 0.62, 0.79, 0.80, 0.88, 0.93, 1.0, 1.20, 1.24, 1.32, 1.40, 1.55, 1.58, 1.60, 1.76, 1.80, 1.86, 2.0, 2.17, 2.20, 2.37, 2.40, 2.48, 2.60, 2.64, 2.79, 2.80, 3.0, 3.08, 3,10, 3.16, 3.41, 3.52, 3.60, 3.72, 3.95, 3.96, 4.0, 4.03, 4.20, 4.34, 4.40, 4.65, 4.74, 4.80, 4.84, 5.0, 5.28, 5.40, 5.53, 5.72, 6.0, 6.16, 6.32, 6.60, 7.11, 7.20, 7.80, 7.90, 8.0, 8.40, 8.69, 9.0, 9.48, 10.27, 11.0, 11.06, 11.85, 12.0, 13.0, 14.0, 15.0], 'variable type': float},
           'x1': {'ll': 0.0, 'ul':20.0, 'variable type': float},
           'x2': {'ll': 0.0, 'ul':40.0, 'variable type': float},}
    problem = BenchmarkProblem(design_variables=dvs,
                               objectives=objs,
                               constraints={},
                               benchmark_name = 're22')
    bb = run_agent(name='blackboard', base=bb_opt.BbOpt)
    bb.set_attr(constraints={})
    bb.initialize_abstract_level_3(objectives=objs, design_variables=dvs, constraints={})
    bb.connect_agent(pm.PyMooAlgorithm, 'ka_nsga2')
    ka = bb.get_attr('_proxy_server')
    ka_s = ka.proxy('ka_nsga2')
    ka_s.set_attr(problem=problem)
    ka_s.set_random_seed(seed=10893)
    # Seed two Pareto entries on the blackboard for the agent to read.
    bb.update_abstract_lvl(3, 'core_[1.0,10.0,10.5]', {'design variables': {'x0': 1.0, 'x1': 10.0, 'x2': 10.50},
                                                       'objective functions': {'f0' : 450.11, 'f1' : 35.12},
                                                       'constraints': {}}, panel='old')
    bb.update_abstract_lvl(1, 'core_[1.0,10.0,10.5]', {'pareto type' : 'pareto', 'fitness function' : 1.0})
    bb.update_abstract_lvl(3, 'core_[1.0,10.0,20.0]', {'design variables': {'x0': 1.0, 'x1': 10.0, 'x2': 20.0},
                                                       'objective functions': {'f0' : 310.11,'f1' : 25.12},
                                                       'constraints': {}}, panel='old')
    bb.update_abstract_lvl(1, 'core_[1.0,10.0,20.0]', {'pareto type' : 'pareto', 'fitness function' : 1.0})
    ka_s.set_attr(lvl_read=bb.get_blackboard()['level 1'])
    ka_s.set_attr(_lvl_data=bb.get_blackboard()['level 3']['old'])
    ka_s.set_attr(pop_size=2)
    ka_s.set_attr(n_pop=1)
    ka_s.set_attr(termination_criteria=6)
    ka_s.search_method()
    # NOTE(review): return value unused — looks like a leftover debug call.
    ka_s.get_attr('_class')
    cores = ['core_[1.0,10.0,10.5]', 'core_[0.0,10.0,18.218799041622493]', 'core_[0.0,13.747225989026843,10.5]', 'core_[3.0,18.179701390487754,7.564838427000653]', 'core_[1.0,10.0,20.0]', 'core_[5.0,10.0,10.5]']
    assert set(list(bb.get_blackboard()['level 3']['new'].keys())) == set(cores)
    ns.shutdown()
    time.sleep(0.1)
def test_force_shutdown():
    """A shutdown message must stop a mid-search agent without losing blackboard state.

    The agent is deliberately slowed down (debug_wait / debug_wait_time) so the
    shutdown arrives while search_method is still running; afterwards only the
    two seed cores should be on the 'new' panel.

    NOTE(review): this test is timing-sensitive — the 0.1 s sleeps around
    send_executor/send_shutdown are what make the race come out right.
    """
    # A name server may linger from a previous test run; retry once after a short wait.
    try:
        ns = run_nameserver()
    except OSError:
        time.sleep(0.5)
        ns = run_nameserver()
    dvs = {'x{}'.format(x):{'ll':0.0, 'ul':1.0, 'variable type': float} for x in range(3)}
    objs = {'f{}'.format(x): {'ll':0.0, 'ul':1000, 'goal':'lt', 'variable type': float} for x in range(3)}
    problem = BenchmarkProblem(design_variables=dvs,
                               objectives=objs,
                               constraints={},
                               benchmark_name = 'dtlz1')
    bb = run_agent(name='blackboard', base=bb_opt.BbOpt)
    bb.set_attr(constraints={})
    bb.initialize_abstract_level_3(objectives=objs, design_variables=dvs, constraints={})
    bb.initialize_metadata_level()
    bb.connect_agent(pm.PyMooAlgorithm, 'ka_nsga2')
    ka = bb.get_attr('_proxy_server')
    ka_s = ka.proxy('ka_nsga2')
    ka_s.set_random_seed(seed=10893)
    # Seed two Pareto entries on the blackboard for the agent to read.
    bb.update_abstract_lvl(3, 'core_[0.650,0.650,0.4]', {'design variables': {'x0': 0.650, 'x1': 0.650, 'x2': 0.4},
                                                         'objective functions': {'f0': 365.0, 'f1': 500.0, 'f2' : 600.0}}, panel='old')
    bb.update_abstract_lvl(1, 'core_[0.650,0.650,0.4]', {'pareto type' : 'pareto', 'fitness function' : 1.0})
    bb.update_abstract_lvl(3, 'core_[0.650,0.750,0.24]', {'design variables': {'x0': 0.650, 'x1': 0.750, 'x2': 0.24},
                                                          'objective functions': {'f0': 36.0, 'f1': 50.0, 'f2' : 60.0}}, panel='old')
    bb.update_abstract_lvl(1, 'core_[0.650,0.750,0.24]', {'pareto type' : 'pareto', 'fitness function' : 1.0})
    ka_s.set_attr(lvl_read=bb.get_blackboard()['level 1'])
    ka_s.set_attr(_lvl_data=bb.get_blackboard()['level 3']['old'])
    ka_s.set_attr(pop_size=2)
    ka_s.set_attr(n_pop=1)
    ka_s.set_attr(termination_criteria=15)
    # Slow the agent down so the shutdown catches it mid-search.
    ka_s.set_attr(problem=problem, debug_wait=True, debug_wait_time=0.05)
    bb.set_attr(final_trigger=0, _kaar = {0: {}, 1: {'ka_nsga2': 2}}, _ka_to_execute=('ka_nsga2', 2))
    bb.send_executor()
    time.sleep(0.1)
    bb.send_shutdown()
    time.sleep(0.1)
    # Both agents should still be registered; only the seed cores made it to 'new'.
    assert ns.agents() == ['blackboard', 'ka_nsga2']
    assert list(bb.get_blackboard()['level 3']['new'].keys()) == ['core_[0.65,0.65,0.4]', 'core_[0.65,0.75,0.24]']
    ns.shutdown()
    time.sleep(0.1)
| 56.255245
| 509
| 0.542234
| 2,464
| 16,089
| 3.372565
| 0.096591
| 0.027798
| 0.025271
| 0.038508
| 0.891697
| 0.879422
| 0.857401
| 0.848736
| 0.824549
| 0.811552
| 0
| 0.134567
| 0.260986
| 16,089
| 286
| 510
| 56.255245
| 0.56434
| 0
| 0
| 0.753036
| 0
| 0
| 0.21504
| 0.0312
| 0
| 0
| 0
| 0
| 0.165992
| 1
| 0.02834
| false
| 0
| 0.032389
| 0
| 0.060729
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b1a5f3d17045677a398539c91fe3de9dfee06695
| 199
|
py
|
Python
|
tests/statistics/test_v.py
|
philihp/openskill.py
|
657a7ddeb81564a23b9aaf19ba225d82b1193046
|
[
"MIT"
] | 120
|
2021-09-03T03:06:11.000Z
|
2022-03-28T05:54:54.000Z
|
tests/statistics/test_v.py
|
philihp/openskill.py
|
657a7ddeb81564a23b9aaf19ba225d82b1193046
|
[
"MIT"
] | 48
|
2021-09-23T07:15:13.000Z
|
2022-03-31T14:47:25.000Z
|
tests/statistics/test_v.py
|
philihp/openskill.py
|
657a7ddeb81564a23b9aaf19ba225d82b1193046
|
[
"MIT"
] | 6
|
2022-01-20T16:45:28.000Z
|
2022-03-28T23:48:07.000Z
|
from openskill.statistics import v
def test_v():
    """Pin openskill's v() statistic against precomputed reference values."""
    cases = {
        (1, 2): 1.525135276160981,
        (0, 2): 2.373215532822843,
        (0, -1): 0.2875999709391784,
        (0, 10): 10,
    }
    for (x, t), expected in cases.items():
        assert v(x, t) == expected
| 22.111111
| 41
| 0.638191
| 31
| 199
| 4.064516
| 0.483871
| 0.222222
| 0.190476
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.387097
| 0.221106
| 199
| 8
| 42
| 24.875
| 0.425806
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0.166667
| true
| 0
| 0.166667
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b1adaa74939cb036de01cd101afc3ede9af1186a
| 42,790
|
py
|
Python
|
features/idioms/2017-11-14/test.py
|
xbabka01/retdec-regression-tests
|
1ac40cca5165740364e6f7fb72b20820eac9bc7c
|
[
"MIT"
] | 8
|
2017-12-14T14:25:17.000Z
|
2019-03-09T03:29:12.000Z
|
features/idioms/2017-11-14/test.py
|
xbabka01/retdec-regression-tests
|
1ac40cca5165740364e6f7fb72b20820eac9bc7c
|
[
"MIT"
] | 10
|
2019-06-14T09:12:55.000Z
|
2021-10-01T12:15:43.000Z
|
features/idioms/2017-11-14/test.py
|
xbabka01/retdec-regression-tests
|
1ac40cca5165740364e6f7fb72b20820eac9bc7c
|
[
"MIT"
] | 8
|
2019-05-10T14:59:48.000Z
|
2022-03-07T16:34:23.000Z
|
from regression_tests import *
class CommonTest(Test):
# Check presence of all functions.
#
def test_has_all_functions(self):
assert self.out_c.has_func_matching(r'_?test_01_LessThanZero')
assert self.out_c.has_func_matching(r'_?test_02_GreaterEqualZero')
assert self.out_c.has_func_matching(r'_?test_03_XorAssignZero')
assert self.out_c.has_func_matching(r'_?test_04_BitShiftMult')
assert self.out_c.has_func_matching(r'_?test_05_DivByMinusTwo')
assert self.out_c.has_func_matching(r'_?test_06_BitShiftDiv')
assert self.out_c.has_func_matching(r'_?test_07_MagicDivSigned')
assert self.out_c.has_func_matching(r'_?test_08_MagicDivSignedNegative')
assert self.out_c.has_func_matching(r'_?test_09_MagicDivUnsinged')
assert self.out_c.has_func_matching(r'_?test_10_XorMinusOne')
assert self.out_c.has_func_matching(r'_?test_11_SignedModulo')
assert self.out_c.has_func_matching(r'_?test_12_UnsignedModulo')
assert self.out_c.has_func_matching(r'_?test_13_FloatNeg')
assert self.out_c.has_func_matching(r'_?test_14_CopySign')
assert self.out_c.has_func_matching(r'_?test_15_FloatAbs')
# Idiom test LessThanZero
#
# TODO: thumb - bug
def test_c_does_not_contain_idiom_LessThanZero(self):
if self.local_arch != 'thumb':
assert self.out_c.contains(r'printf\("test_01_LessThanZero: %d", \(int32_t\)\(\S+ < 0\)\);')
# Idiom test GreaterEqualZero
#
# TODO: powerpc - bug - "(int32_t)(-v1 < 1)" instead of "v1 > -1"
# TODO: pic32 /O1/ - ---||---
# TODO: arm /O1,O2,O#/ - ---||---
# TODO: thumb - bug
def test_c_does_not_contain_idiom_GreaterEqualZero(self):
if self.local_arch in {'mips', 'x86'}:
assert self.out_c.contains(r'printf\("test_02_GreaterEqualZero: %d", \(int32_t\)\(\S+ > -1\)\);')
# Idiom test LessThanZero
#
# TODO: thumb /O0/ - bug - empty function
def test_c_does_not_contain_idiom_XorAssignZero(self):
if self.local_arch != 'thumb':
assert self.out_c.contains(r'printf\("test_03_XorAssignZero: %d", 0\);')
# Idiom test BitShiftMult
#
def test_c_does_not_contain_idiom_BitShiftMult(self):
assert self.out_c.contains(r'printf\("test_04_BitShiftMult_01: %d", 2 \* \S+\);')
assert self.out_c.contains(r'printf\("test_04_BitShiftMult_02: %d", 4 \* \S+\);')
assert self.out_c.contains(r'printf\("test_04_BitShiftMult_03: %d", 8 \* \S+\);')
assert self.out_c.contains(r'printf\("test_04_BitShiftMult_04: %d", 16 \* \S+\);')
assert self.out_c.contains(r'printf\("test_04_BitShiftMult_05: %d", 32 \* \S+\);')
assert self.out_c.contains(r'printf\("test_04_BitShiftMult_06: %d", 64 \* \S+\);')
assert self.out_c.contains(r'printf\("test_04_BitShiftMult_07: %d", 128 \* \S+\);')
assert self.out_c.contains(r'printf\("test_04_BitShiftMult_08: %d", 256 \* \S+\);')
assert self.out_c.contains(r'printf\("test_04_BitShiftMult_09: %d", 512 \* \S+\);')
assert self.out_c.contains(r'printf\("test_04_BitShiftMult_10: %d", 1024 \* \S+\);')
assert self.out_c.contains(r'printf\("test_04_BitShiftMult_20: %d", 0x100000 \* \S+\);')
# TODO: thumb - bug
if self.local_arch != 'thumb':
assert self.out_c.contains(r'printf\("test_04_BitShiftMult_30: %d", 0x40000000 \* \S+\);')
# Idiom test DivByMinusTwo
#
# TODO: x86 - bug - "((int32_t)(v1 < 0) + v1) / -2)" instead of "v1 / -2"
# TODO: thumb - bug - "-((((int32_t)(v1 < 0) + v1) / 2))" instead of "v1 / -2"
def test_c_does_not_contain_idiom_DivByMinusTwo(self):
if self.local_arch not in {'thumb', 'x86'}:
assert self.out_c.contains(r'printf\("test_05_DivByMinusTwo: %d", \S+ / -2\);')
# Idiom test BitShiftDiv
#
# TODO: x86 - bug - "(v1 < 0 ? v1 + 3 : v1) / 4" instead of "v1 / 4"
# TODO: pic32 - bug - "(v1 < 0) + v1) / 2)" instead of "v1 / 4"
# TODO: arm - bug - "((int32_t)(v1 < 0) + v1) / 2)" instead of "v1 / 2"
# TODO: thumb - bug - "((int32_t)(v1 < 0) + v1) / 2)" instead of "v1 / 2"
def test_c_does_not_contain_idiom_BitShiftDiv(self):
if self.local_arch in {'mips', 'powerpc'}:
# TODO: powerpc - bug - "v2 / 2 | v2 & -0x80000000" instead of "v1 / 2"
if self.local_arch != 'powerpc':
assert self.out_c.contains(r'printf\("test_06_BitShiftDiv_01: %d", \S+ / 2\);')
assert self.out_c.contains(r'printf\("test_06_BitShiftDiv_02: %d", \S+ / 4\);')
assert self.out_c.contains(r'printf\("test_06_BitShiftDiv_03: %d", \S+ / 8\);')
assert self.out_c.contains(r'printf\("test_06_BitShiftDiv_04: %d", \S+ / 16\);')
assert self.out_c.contains(r'printf\("test_06_BitShiftDiv_05: %d", \S+ / 32\);')
assert self.out_c.contains(r'printf\("test_06_BitShiftDiv_06: %d", \S+ / 64\);')
assert self.out_c.contains(r'printf\("test_06_BitShiftDiv_07: %d", \S+ / 128\);')
assert self.out_c.contains(r'printf\("test_06_BitShiftDiv_08: %d", \S+ / 256\);')
assert self.out_c.contains(r'printf\("test_06_BitShiftDiv_09: %d", \S+ / 512\);')
assert self.out_c.contains(r'printf\("test_06_BitShiftDiv_10: %d", \S+ / 1024\);')
assert self.out_c.contains(r'printf\("test_06_BitShiftDiv_20: %d", \S+ / 0x100000\);')
assert self.out_c.contains(r'printf\("test_06_BitShiftDiv_30: %d", \S+ / 0x40000000\);')
# Idiom test MagicDivSigned
#
# TODO: x86 - bug - totally wrong
# TODO: arm - bug - totally wrong
# TODO: powerpc /O1,O2,O3/ - bug - some of the idioms contain type casting
    def test_c_does_not_contain_idiom_MagicDivSigned(self):
        """Signed magic-number division must decompile back to plain `x / C`.

        Only mips, pic32 and thumb are checked; per the TODOs above this
        method, the remaining architectures decompile these idioms wrongly.
        """
        if self.local_arch in {'mips', 'pic32', 'thumb'}:
            # TODO: pic32 - bug
            if self.local_arch != 'pic32':
                assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_03: %d", \S+ / 3\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_05: %d", \S+ / 5\);')
            # TODO: pic32 - bug
            if self.local_arch != 'pic32':
                assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_06: %d", \S+ / 6\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_07: %d", \S+ / 7\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_09: %d", \S+ / 9\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_10: %d", \S+ / 10\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_11: %d", \S+ / 11\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_12: %d", \S+ / 12\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_13: %d", \S+ / 13\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_14: %d", \S+ / 14\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_15: %d", \S+ / 15\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_17: %d", \S+ / 17\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_18: %d", \S+ / 18\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_19: %d", \S+ / 19\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_20: %d", \S+ / 20\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_29: %d", \S+ / 29\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_30: %d", \S+ / 30\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_31: %d", \S+ / 31\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_35: %d", \S+ / 35\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_47: %d", \S+ / 47\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_51: %d", \S+ / 51\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_57: %d", \S+ / 57\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_62: %d", \S+ / 62\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_70: %d", \S+ / 70\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_73: %d", \S+ / 73\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_89: %d", \S+ / 89\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_91: %d", \S+ / 91\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_94: %d", \S+ / 94\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_95: %d", \S+ / 95\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_99: %d", \S+ / 99\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_100: %d", \S+ / 100\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_101: %d", \S+ / 101\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_102: %d", \S+ / 102\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_120: %d", \S+ / 120\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_203: %d", \S+ / 203\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_204: %d", \S+ / 204\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_213: %d", \S+ / 213\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_218: %d", \S+ / 218\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_221: %d", \S+ / 221\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_228: %d", \S+ / 228\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_254: %d", \S+ / 254\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_255: %d", \S+ / 255\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_58441: %d", \S+ / 0xe449\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_58442: %d", \S+ / 0xe44a\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_58443: %d", \S+ / 0xe44b\);')
            assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_58444: %d", \S+ / 0xe44c\);')
            # TODO: mips - bug - load of 32-bit numbers
            # TODO: pic32 - bug - load of 32-bit numbers
            # TODO: thumb - bug - "*(int32_t *)g5 / 0xe44d"
            # NOTE(review): this guard is nested inside the {'mips','pic32','thumb'}
            # branch above, so its condition can never be True — the asserts it
            # guards are dead code. Confirm whether it was meant to be dedented.
            if self.local_arch in {'powerpc', 'arm', 'x86'}:
                assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_58445: %d", \S+ / 0xe44d\);')
                assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_68441835: %d", \S+ / 0x41456eb\);')
                assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_68441836: %d", \S+ / 0x41456ec\);')
                assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_68441837: %d", \S+ / 0x41456ed\);')
                assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_68441838: %d", \S+ / 0x41456ee\);')
                assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_68441839: %d", \S+ / 0x41456ef\);')
                assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_68441840: %d", \S+ / 0x41456f0\);')
                assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_68441841: %d", \S+ / 0x41456f1\);')
                assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_68441842: %d", \S+ / 0x41456f2\);')
            # TODO: thumb - bug
            if self.local_arch != 'thumb':
                assert self.out_c.contains(r'printf\("test_07_MagicDivSigned_68441843: %d", \S+ / 0x41456f3\);')
# Idiom test MagicDivSignedNegative
#
# TODO: x86 - bug - totally wrong
# TODO: arm - bug - totally wrong
# TODO: powerpc /O1,O2,O3/ - bug - some of the idioms contain type casting
# TODO: thumb - worked with old compilers, does not with the new ones.
    def test_c_does_not_contain_idiom_MagicDivSignedNegative(self):
        """Signed magic-number division by negative constants must decompile to `x / -C`.

        Only mips and pic32 are checked; per the TODOs above this method, the
        remaining architectures decompile these idioms wrongly.
        """
        if self.local_arch in {'mips', 'pic32'}:
            # TODO: pic32 - bug
            if self.local_arch != 'pic32':
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_03: %d", \S+ / -3\);')
            assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_05: %d", \S+ / -5\);')
            # TODO: pic32 - bug
            if self.local_arch != 'pic32':
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_06: %d", \S+ / -6\);')
            assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_07: %d", \S+ / -7\);')
            assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_09: %d", \S+ / -9\);')
            assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_10: %d", \S+ / -10\);')
            assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_11: %d", \S+ / -11\);')
            assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_12: %d", \S+ / -12\);')
            assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_13: %d", \S+ / -13\);')
            assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_14: %d", \S+ / -14\);')
            assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_15: %d", \S+ / -15\);')
            assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_17: %d", \S+ / -17\);')
            assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_18: %d", \S+ / -18\);')
            assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_19: %d", \S+ / -19\);')
            assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_20: %d", \S+ / -20\);')
            assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_29: %d", \S+ / -29\);')
            assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_30: %d", \S+ / -30\);')
            assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_31: %d", \S+ / -31\);')
            assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_35: %d", \S+ / -35\);')
            assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_47: %d", \S+ / -47\);')
            assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_51: %d", \S+ / -51\);')
            assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_57: %d", \S+ / -57\);')
            assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_62: %d", \S+ / -62\);')
            assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_70: %d", \S+ / -70\);')
            assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_73: %d", \S+ / -73\);')
            # TODO: thumb - strange bug - all the following calls are missing
            if self.local_arch != 'thumb':
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_89: %d", \S+ / -89\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_91: %d", \S+ / -91\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_94: %d", \S+ / -94\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_95: %d", \S+ / -95\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_99: %d", \S+ / -99\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_100: %d", \S+ / -100\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_101: %d", \S+ / -101\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_102: %d", \S+ / -102\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_120: %d", \S+ / -120\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_203: %d", \S+ / -203\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_204: %d", \S+ / -204\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_213: %d", \S+ / -213\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_218: %d", \S+ / -218\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_221: %d", \S+ / -221\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_228: %d", \S+ / -228\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_254: %d", \S+ / -254\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_255: %d", \S+ / -255\);')
            # TODO: mips - bug - load of 32-bit numbers
            # TODO: pic32 - bug - load of 32-bit numbers
            # NOTE(review): this guard is nested inside the {'mips','pic32'} branch
            # above, so its condition can never be True — the asserts it guards are
            # dead code. Confirm whether it was meant to be dedented.
            if self.local_arch in {'powerpc', 'arm', 'thumb', 'x86'}:
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_58441: %d", \S+ / -0xe449\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_58442: %d", \S+ / -0xe44a\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_58443: %d", \S+ / -0xe44b\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_58444: %d", \S+ / -0xe44c\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_58445: %d", \S+ / -0xe44d\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_68441835: %d", \S+ / -0x41456eb\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_68441836: %d", \S+ / -0x41456ec\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_68441837: %d", \S+ / -0x41456ed\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_68441838: %d", \S+ / -0x41456ee\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_68441839: %d", \S+ / -0x41456ef\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_68441840: %d", \S+ / -0x41456f0\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_68441841: %d", \S+ / -0x41456f1\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_68441842: %d", \S+ / -0x41456f2\);')
                assert self.out_c.contains(r'printf\("test_08_MagicDivSignedNegative_68441843: %d", \S+ / -0x41456f3\);')
# Idiom test MagicDivUnsinged
#
# TODO: x86 - bug - totally wrong
# TODO: pic32 - bug - totally wrong
# TODO: arm /O1-O3/ - bug - totally wrong
# TODO: powerpc /O1,O2,O3/ - bug - some of the idioms contain type casting
# TODO: thumb - worked with old compilers, does not with the new ones.
def test_c_does_not_contain_idiom_MagicDivUnsinged(self):
    """Unsigned "magic number" division must decompile back to `/ constant`.

    Labels in the printf strings are zero-padded decimal; divisors above
    256 appear as lower-case hex literals in the emitted C.
    """
    template = r'printf\("test_09_MagicDivUnsinged_{}: %d", \S+ / {}\);'

    def pattern_for(divisor):
        # Small divisors are decimal with a two-digit zero-padded label;
        # larger ones keep a decimal label but a hex divisor literal.
        if divisor <= 256:
            return template.format('%02d' % divisor, divisor)
        return template.format(divisor, hex(divisor))

    if self.local_arch in {'mips'}:
        small_divisors = [
            3, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 18, 19, 20,
            29, 30, 31, 35, 47, 51, 57, 62, 70, 73, 89, 91, 94, 95,
            99, 100, 101, 102, 120, 203, 204, 213, 218, 221, 228,
            254, 255,
        ]
        for divisor in small_divisors + list(range(0xe449, 0xe44e)):
            assert self.out_c.contains(pattern_for(divisor))
    # TODO: mips - bug - load of 32-bit numbers
    # TODO: thumb - bug - " struct struct_7 * v1"
    if self.local_arch in {'pic32', 'powerpc', 'arm', 'x86'}:
        for divisor in range(0x41456eb, 0x41456f3):
            assert self.out_c.contains(pattern_for(divisor))
        # TODO: thumb - bug.  (Guard is vacuous here since thumb never
        # enters this branch; kept to preserve the original intent.)
        if self.local_arch != 'thumb':
            assert self.out_c.contains(pattern_for(0x41456f3))
# Idiom test XorMinusOne
#
# TODO: thumb - bug
def test_c_does_not_contain_idiom_XorMinusOne(self):
if self.local_arch != 'thumb':
assert self.out_c.contains(r'printf\("test_10_XorMinusOne: %d", -1 - \S+\);')
# Idiom test SignedModulo
#
# TODO: x86 - bug - totally wrong
# TODO: arm - bug - totally wrong
# TODO: powerpc - bug - some of the idioms contain type casting
# TODO: thumb - worked with the old compilers, does not with the new ones
def test_c_does_not_contain_idiom_SignedModulo(self):
    """Signed modulo by a constant must decompile to a plain `% constant`."""
    template = r'printf\("test_11_SignedModulo_{}: %d", \S+ % {}\);'

    def pattern_for(divisor):
        # Zero-padded decimal labels; divisors above 256 use hex literals.
        if divisor <= 256:
            return template.format('%02d' % divisor, divisor)
        return template.format(divisor, hex(divisor))

    if self.local_arch in {'mips', 'pic32'}:
        # TODO: thumb - bug - % (pow_2) is wrong.  The thumb guards in
        # this branch are vacuous (thumb never reaches here) but are
        # preserved to mirror the original intent.
        powers_of_two = {2, 4, 8, 16}
        for divisor in [2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
                        16, 17, 18, 19, 20, 29, 30, 31, 35, 47, 51, 57,
                        62, 70, 73, 89, 91, 94]:
            if divisor in powers_of_two and self.local_arch == 'thumb':
                continue
            assert self.out_c.contains(pattern_for(divisor))
        # TODO: thumb - multiple bug on thumb /O0 x O1 x .../
        if self.local_arch != 'thumb':
            late_divisors = [95, 99, 100, 101, 102, 120, 128, 203, 204,
                             213, 218, 221, 228, 254, 255, 256]
            for divisor in late_divisors + list(range(0xe449, 0xe44e)):
                assert self.out_c.contains(pattern_for(divisor))
    # TODO: mips - bug - load of 32-bit numbers
    # TODO: pic32 - bug - load of 32-bit numbers
    if self.local_arch in {'powerpc', 'arm', 'thumb', 'x86'}:
        for divisor in range(0x41456eb, 0x41456f4):
            assert self.out_c.contains(pattern_for(divisor))
# Idiom test UnsignedModulo
#
# TODO: x86 - bug - totally wrong
# TODO: arm - bug - totally wrong
# TODO: thumb /O0/ - minor bug - there is a type cast
# TODO: powerpc - bug - some of the idioms contain type casting
def test_c_does_not_contain_idiom_UnsignedModulo(self):
    """Unsigned modulo by a constant must decompile to a plain `% constant`.

    Note: unlike the other idiom checks these patterns use `.*` for the
    operand and have no trailing semicolon inside the regex.
    """
    template = r'printf\("test_12_UnsignedModulo_{}: %d", .* % {}\)'

    def pattern_for(divisor):
        # Zero-padded decimal labels; divisors above 256 use hex literals.
        if divisor <= 256:
            return template.format('%02d' % divisor, divisor)
        return template.format(divisor, hex(divisor))

    if self.local_arch in {'mips', 'pic32'}:
        for divisor in [2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
                        16, 17, 18, 19, 20, 29, 30, 31, 35, 47, 51, 57,
                        62, 70, 73, 89, 91, 94, 95, 99, 100, 101, 102,
                        120, 128, 203, 204, 213]:
            assert self.out_c.contains(pattern_for(divisor))
        # TODO: thumb - strange bug - all the following calls are missing.
        # (Guard is vacuous here since thumb never enters this branch;
        # kept to preserve the original intent.)
        if self.local_arch != 'thumb':
            for divisor in [218, 221, 228, 254, 255]:
                assert self.out_c.contains(pattern_for(divisor))
    # TODO: mips - bug
    # TODO: pic32 - bug
    if self.local_arch in {'powerpc', 'arm', 'thumb', 'x86'}:
        for divisor in [256] + list(range(0xe449, 0xe44e)):
            assert self.out_c.contains(pattern_for(divisor))
    # TODO: mips - bug - load of 32-bit numbers
    # TODO: pic32 - bug - load of 32-bit numbers
    if self.local_arch in {'powerpc', 'arm', 'thumb', 'x86'}:
        for divisor in range(0x41456eb, 0x41456f4):
            assert self.out_c.contains(pattern_for(divisor))
# Idiom test FloatNeg - only for mips and arm (powerpc lacks support of FPU)
#
#
# TODO: mips - bug
# TODO: pic32 - bug
# TODO: arm - it is correct, but there remain many type casts
# TODO: thumb - bug
def test_c_does_not_contain_idiom_FloatNeg(self):
    # Intentionally disabled for every architecture: `{}` is an empty
    # dict literal, so the membership test below is always False (all
    # targets currently mis-decompile FloatNeg per the TODO notes).
    # Re-enable by listing working architectures in the set.
    if self.local_arch in {}:
        assert self.out_c.contains(r'printf\("test_13_FloatNeg: %f", - \S+\);')
# Idiom test CopySign - only for mips and arm (powerpc lacks support of FPU)
#
# TODO: mips
# TODO: ARM and THUMB fail on -O1 and -O3
def test_c_does_not_contain_idiom_CopySign(self):
if self.local_arch in {'pic32', 'arm', 'thumb'} and self.local_format == 'elf' and self.settings.input.endswith('.O0.elf'):
assert self.out_c.funcs['test_14_CopySign'].calls('copysignf') or self.out_c.funcs['test_14_CopySign'].calls('copysign')
# Idiom test FloatAbs - only for mips and arm (powerpc lacks support of FPU)
#
# TODO: mips - bug
# TODO: pic32 /O1/ - bug
# TODO: thumb /O1,O3/ - bug
def test_c_does_not_contain_idiom_FloatAbs(self):
if self.local_arch in {'arm'}:
if self.out_c.funcs['test_15_FloatAbs'].calls('fabsf') or self.out_c.funcs['test_15_FloatAbs'].calls('fabs'):
test_15_FloatAbs_calls_fabsX = True
else:
test_15_FloatAbs_calls_fabsX = False
assert test_15_FloatAbs_calls_fabsX == True
class TestArmGccElf(CommonTest):
    # ARM/ELF binaries built by GCC at -O0, -O1 and -O3.
    settings = TestSettings( input=['idioms.arm.gcc.O0.elf', 'idioms.arm.gcc.O1.elf', 'idioms.arm.gcc.O3.elf'])
    local_arch="arm"
    local_format="elf"
class TestArmGccPe(CommonTest):
    # ARM/PE binaries built by GCC at -O0, -O1 and -O3.
    settings = TestSettings( input=['idioms.arm.gcc.O0.exe', 'idioms.arm.gcc.O1.exe', 'idioms.arm.gcc.O3.exe'])
    local_arch="arm"
    local_format="pe"
class TestMipsGccElf(CommonTest):
    # MIPS/ELF binaries built by GCC at -O0, -O1 and -O3.
    settings = TestSettings( input=['idioms.mips.gcc.O0.elf', 'idioms.mips.gcc.O1.elf', 'idioms.mips.gcc.O3.elf'])
    local_arch="mips"
    local_format="elf"
class TestPic32GccElf(CommonTest):
    # PIC32/ELF binaries built by GCC; note: only -O0 and -O1 (no -O3 input).
    settings = TestSettings( input=['idioms.pic32.gcc.O0.elf', 'idioms.pic32.gcc.O1.elf'])
    local_arch="pic32"
    local_format="elf"
class TestPowerpcGccElf(CommonTest):
    # PowerPC/ELF binaries built by GCC at -O0, -O1 and -O3.
    settings = TestSettings( input=['idioms.powerpc.gcc.O0.elf', 'idioms.powerpc.gcc.O1.elf', 'idioms.powerpc.gcc.O3.elf'])
    local_arch="powerpc"
    local_format="elf"
class TestThumbGccElf(CommonTest):
    # Thumb/ELF binaries built by GCC at -O0, -O1 and -O3.
    settings = TestSettings( input=['idioms.thumb.gcc.O0.elf', 'idioms.thumb.gcc.O1.elf', 'idioms.thumb.gcc.O3.elf'])
    local_arch="thumb"
    local_format="elf"
class TestX86GccElf(CommonTest):
    # x86/ELF binaries built by GCC at -O0, -O1 and -O3.
    settings = TestSettings( input=['idioms.x86.gcc.O0.elf', 'idioms.x86.gcc.O1.elf', 'idioms.x86.gcc.O3.elf'])
    local_arch="x86"
    local_format="elf"
class TestX86GccPe(CommonTest):
    # x86/PE binaries built by GCC at -O0, -O1 and -O3.
    settings = TestSettings( input=['idioms.x86.gcc.O0.exe', 'idioms.x86.gcc.O1.exe', 'idioms.x86.gcc.O3.exe'])
    local_arch="x86"
    local_format="pe"
| 76.822262
| 132
| 0.618416
| 5,835
| 42,790
| 4.268552
| 0.040103
| 0.095837
| 0.109527
| 0.189987
| 0.904605
| 0.884932
| 0.872807
| 0.864697
| 0.846951
| 0.813908
| 0
| 0.085886
| 0.208179
| 42,790
| 556
| 133
| 76.960432
| 0.649224
| 0.080369
| 0
| 0.084112
| 0
| 0
| 0.489935
| 0.326657
| 0
| 0
| 0.015059
| 0.001799
| 0.792056
| 1
| 0.037383
| false
| 0
| 0.002336
| 0
| 0.116822
| 0.752336
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
496387ed012eb59529eac77507cea222097886ad
| 136
|
py
|
Python
|
test/locust/cb-manager/locustfile.py
|
astrid-project/astrid-framework
|
4dd6ebe124f7c270f6ac4bf5f9ee959dc6d7307b
|
[
"MIT"
] | 3
|
2020-10-14T19:48:37.000Z
|
2021-03-31T12:20:40.000Z
|
test/locust/cb-manager/locustfile.py
|
astrid-project/framework
|
85fafe24f70318a19e2333d23acd48f1121bb9ff
|
[
"MIT"
] | 5
|
2020-02-18T09:55:15.000Z
|
2021-01-04T09:44:14.000Z
|
test/locust/cb-manager/locustfile.py
|
astrid-project/framework
|
85fafe24f70318a19e2333d23acd48f1121bb9ff
|
[
"MIT"
] | 3
|
2021-02-16T18:16:33.000Z
|
2021-07-26T12:10:50.000Z
|
from connection import User as ConnectionUser
from exec_env import User as ExecEnvUser
from network_link import User as NetworkLinkUser
| 34
| 48
| 0.867647
| 20
| 136
| 5.8
| 0.6
| 0.258621
| 0.310345
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132353
| 136
| 3
| 49
| 45.333333
| 0.983051
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
498233addf69e98cfc7a247bc615dc30eb4248e9
| 30,848
|
py
|
Python
|
miscellany/bernouli_tests.py
|
nlpie/nlp-ensemble-explorer
|
b687684cf557b9badceb435485abc680face77ee
|
[
"Apache-2.0"
] | 1
|
2021-03-15T12:54:37.000Z
|
2021-03-15T12:54:37.000Z
|
miscellany/bernouli_tests.py
|
nlpie/nlp-ensemble-explorer
|
b687684cf557b9badceb435485abc680face77ee
|
[
"Apache-2.0"
] | null | null | null |
miscellany/bernouli_tests.py
|
nlpie/nlp-ensemble-explorer
|
b687684cf557b9badceb435485abc680face77ee
|
[
"Apache-2.0"
] | 1
|
2021-03-15T12:54:44.000Z
|
2021-03-15T12:54:44.000Z
|
#!/usr/bin/env python
# coding: utf-8
# In[2]:
import pandas as pd
import numpy as np
import math
import pymysql
import time
import functools as ft
import glob, os
import operator as op
import shelve
import ipywidgets as widgets
from ipywidgets import interact, interact_manual
from pandas.api.types import is_numeric_dtype
from pathlib import Path
from itertools import combinations, product, permutations
from sqlalchemy.engine import create_engine
from datetime import datetime
from ast import literal_eval
from scipy import stats
from scipy.stats.mstats import gmean
from pythonds.basic.stack import Stack
from pythonds.trees.binaryTree import BinaryTree
from collections import defaultdict
import collections
from typing import List, Set, Tuple
import matplotlib.pyplot as plt
from matplotlib.ticker import StrMethodFormatter
# Machine-specific paths/services used throughout this notebook-exported script.
data_directory = '/Users/gms/development/nlp/nlpie/data/ensembling-u01/output/'
# NOTE(review): DB credentials are hard-coded in the DSN -- consider moving
# them to environment variables or a config file.
engine = create_engine('mysql+pymysql://gms:nej123@localhost/concepts', pool_pre_ping=True)
# In[80]:
# Re-export the source notebook to a .py script; only works inside IPython.
get_ipython().system('jupyter nbconvert --to script bernouli_tests.ipynb')
# In[3]:
# confidence intervals
import numpy as np
from scipy.stats import norm
# Requires numpy and scipy.stats
# https://github.com/sousanunes/confidence_intervals.git
def normal_approximation_binomial_confidence_interval(s, n, confidence_level=.95):
    '''Computes the binomial confidence interval of the probability of a success s,
    based on the sample of n observations.  The normal approximation is used,
    appropriate when n is equal to or greater than 30 observations.
    The confidence level is between 0 and 1, with default 0.95.
    Returns (p_estimate, interval_range, lower_bound, upper_bound).
    For reference, see Section 5.2 of Tom Mitchel's "Machine Learning" book.'''
    p_hat = (1.0 * s) / n
    # Two-sided z critical value for the requested confidence level.
    z_crit = norm.interval(confidence_level)[1]
    half_width = z_crit * np.sqrt(p_hat * (1 - p_hat) / n)
    return p_hat, half_width, p_hat - half_width, p_hat + half_width
def f1_score_confidence_interval(r, p, dr, dp):
    '''Computes the confidence interval for the F1-score measure of classification performance
    based on the values of recall (r), precision (p), and their respective confidence
    interval ranges, or absolute uncertainty, about the recall (dr) and the precision (dp).
    Returns (f1_score, interval_range, lower_bound, upper_bound).
    Disclaimer: formula derived by the original author from f(r,p) = 2rp / (r+p);
    it has not been independently reviewed.'''
    f1 = (2.0 * r * p) / (r + p)
    # Relative uncertainties of recall and precision ...
    relative_terms = np.power(dr / r, 2.0) + np.power(dp / p, 2.0)
    # ... plus the propagated uncertainty of the shared denominator (r + p).
    denominator_term = (np.power(dr, 2.0) + np.power(dp, 2.0)) / np.power(r + p, 2.0)
    half_width = np.abs(f1) * np.sqrt(relative_terms + denominator_term)
    return f1, half_width, f1 - half_width, f1 + half_width
# recall_successes = 42
# recall_obs = 63
# [r, dr, r_upper_bound, r_lower_bound] = normal_approximation_binomial_confidence_interval(recall_successes, recall_obs)
# In[3]:
dir_to_process = "/Users/gms/development/nlp/nlpie/data/ensembling-u01/output/submission/files_for_ci"
# In[3]:
# In[66]:
# one off ss
# One-off statistical-significance check for two hand-entered confusion counts.
# The bare triple-quoted string below is a no-op "data note" kept for reference.
''' F1 precision recall TP FN FP TP/FN n_gold 0 0.718201 0.637617 0.822101 91887 19884 52223
TP FN FP
106875 31880 64609
'''
# NOTE(review): the first assignment in each pair below is a dead store --
# the second value immediately overrides it (leftover from trying two runs).
tp = 12125
tp = 91887
fn = 10622
fn = 19884
recall_obs = tp + fn
fp = 107509
fp = 52223
precision_obs = tp + fp
[r, dr, r_upper_bound, r_lower_bound] = normal_approximation_binomial_confidence_interval(tp, recall_obs)
[p, dp, p_upper_bound, p_lower_bound] = normal_approximation_binomial_confidence_interval(tp, precision_obs)
[f, df, f_upper_bound, f_lower_bound] = f1_score_confidence_interval(r, p, dr, dp)
#print(round(f_upper_bound, 3),round(f_lower_bound, 3))
# Second data point (see the reference block above).
tp = 106875
fn = 31880
recall_obs = tp + fn
fp = 64609
precision_obs = tp + fp
[r, dr, r_upper_bound, r_lower_bound] = normal_approximation_binomial_confidence_interval(tp, recall_obs)
[p, dp, p_upper_bound, p_lower_bound] = normal_approximation_binomial_confidence_interval(tp, precision_obs)
[f, df, f_upper_bound, f_lower_bound] = f1_score_confidence_interval(r, p, dr, dp)
#print(round(f_upper_bound, 3),round(f_lower_bound, 3))
# In[72]:
# get ci for single system for table 2 -> TEST
# For every (corpus, semtype) pair, compute per-row confidence intervals for
# each metric and report which rows' intervals overlap the best row's interval.
import pandas as pd
input_dir = '/Users/gms/development/nlp/nlpie/data/ensembling-u01/output/submission/'
file = 'single_system_summary_new.csv'
# change metric here
m_labels = ['F1', 'precision', 'recall']
corpora = ['fairview', 'i2b2', 'mipacq']
semtypes = ['Anatomy',
            'Findings',
            'Chemicals&Drugs',
            'Procedures',
            'all']
print('Single system significance within corpus by semtype, across systems:')
for corpus in corpora:
    for st in semtypes:
        print('CORPUS:', corpus, st)
        data = pd.read_csv(input_dir + file)
        data = data[data['corpus']==corpus]
        data = data[data['semtypes'] == st]
        if not data.empty:
            for m_label in m_labels:
                metric = list()
                ci = list()
                # entire collection:
                for row in data.itertuples():
                    #print(row.TP, row.FN, row.FP)
                    tp = row.TP
                    fn = row.FN
                    recall_obs = tp + fn
                    fp = row.FP
                    precision_obs = tp + fp
                    [r, dr, r_upper_bound, r_lower_bound] = normal_approximation_binomial_confidence_interval(tp, recall_obs)
                    [p, dp, p_upper_bound, p_lower_bound] = normal_approximation_binomial_confidence_interval(tp, precision_obs)
                    [f, df, f_upper_bound, f_lower_bound] = f1_score_confidence_interval(r, p, dr, dp)
                    # ci tuples: (upper, lower, system, corpus, semtypes, metric value)
                    if m_label == 'F1':
                        m = row.F1
                        ci.append((round(f_upper_bound, 3),round(f_lower_bound, 3), row.system, row.corpus, row.semtypes, row.F1))
                    elif m_label == 'precision':
                        m = row.precision
                        ci.append((round(p_upper_bound, 3),round(p_lower_bound, 3), row.system, row.corpus, row.semtypes, row.precision))
                    elif m_label == 'recall':
                        m = row.recall
                        ci.append((round(r_upper_bound, 3),round(r_lower_bound, 3), row.system, row.corpus, row.semtypes, row.recall))
                    metric.append(m)
                # SS for max F1
                M = max(metric)
                c_i = None
                for c in ci:
                    if M == c[5]:
                        c_i = (c[0], c[1])
                print('st max:', m_label, corpus)
                # NOTE(review): c_i stays None if no row equals the max --
                # cannot happen while metric and ci are filled from the same
                # rows, but the subscript below would raise if it did.
                for c in ci:
                    if (c_i[0] <= c[0] and c_i[1] > c[0]) or (c_i[0] >= c[0] and c_i[0] < c[1]):
                        print(round(M, 3), c)
                # ## SS wrt "All groups"
                # c_i = None
                # for c in ci:
                #     if 'all' == c[4]:
                #         c_i = (c[0], c[1])
                # print('st all:')
                # for c in ci:
                #     # if c[0] <= F <= c[1]:
                #     if (c_i[0] <= c[0] and c_i[1] > c[0]) or (c_i[0] >= c[0] and c_i[0] < c[1]):
                #         print(round(M, 3), c)
                print('-----------------')
                print('-----------------')
                print('-----------------')
                print('-----------------')
                print('-----------------')
# In[73]:
# get ci for single system for table 2
# Same CI/overlap report as In[72], but computed per (corpus, system) so the
# comparison is across semtype groups *within* one system.
import pandas as pd
input_dir = '/Users/gms/development/nlp/nlpie/data/ensembling-u01/output/submission/'
file = 'single_system_summary_new.csv'
# change metric here
print('Single system significance within corpus by max metric and all groups within system:')
corpora = ['fairview', 'i2b2', 'mipacq']
m_labels = ['F1', 'precision', 'recall']
systems = ['biomedicus','clamp','ctakes','metamap','quick_umls']
for corpus in corpora:
    # NOTE(review): `sys` shadows the stdlib module name (not imported here,
    # but renaming would be cleaner); the header print omits the system name.
    for sys in systems:
        print('CORPUS:', corpus)
        for m_label in m_labels:
            df = pd.read_csv(input_dir + file)
            df = df[df['corpus']==corpus]
            df = df[df['system']==sys]
            metric = list()
            ci = list()
            # entire collection:
            for row in df.itertuples():
                #print(row.TP, row.FN, row.FP)
                tp = row.TP
                fn = row.FN
                recall_obs = tp + fn
                fp = row.FP
                precision_obs = tp + fp
                [r, dr, r_upper_bound, r_lower_bound] = normal_approximation_binomial_confidence_interval(tp, recall_obs)
                [p, dp, p_upper_bound, p_lower_bound] = normal_approximation_binomial_confidence_interval(tp, precision_obs)
                # NOTE(review): this unpack rebinds `df` (the DataFrame) to the
                # F1 half-width; iteration continues because itertuples() was
                # already created and the frame is re-read each m_label pass,
                # but renaming one of the two `df`s would be safer.
                [f, df, f_upper_bound, f_lower_bound] = f1_score_confidence_interval(r, p, dr, dp)
                if m_label == 'F1':
                    m = row.F1
                    ci.append((round(f_upper_bound, 3),round(f_lower_bound, 3), row.system, row.corpus, row.semtypes, row.F1))
                elif m_label == 'precision':
                    m = row.precision
                    ci.append((round(p_upper_bound, 3),round(p_lower_bound, 3), row.system, row.corpus, row.semtypes, row.precision))
                elif m_label == 'recall':
                    m = row.recall
                    ci.append((round(r_upper_bound, 3),round(r_lower_bound, 3), row.system, row.corpus, row.semtypes, row.recall))
                metric.append(m)
            # SS for max F1
            M = max(metric)
            c_i = None
            for c in ci:
                if M == c[5]:
                    c_i = (c[0], c[1])
            print('st max:', m_label, corpus)
            for c in ci:
                if (c_i[0] <= c[0] and c_i[1] > c[0]) or (c_i[0] >= c[0] and c_i[0] < c[1]):
                    print(round(M, 3), c)
            ## SS wrt "All groups"
            c_i = None
            for c in ci:
                if 'all' == c[4]:
                    c_i = (c[0], c[1])
            print('st all:')
            for c in ci:
                if (c_i[0] <= c[0] and c_i[1] > c[0]) or (c_i[0] >= c[0] and c_i[0] < c[1]):
                    print(round(M, 3), c)
            print('-----------------')
            print('-----------------')
            print('-----------------')
            print('-----------------')
            print('-----------------')
# In[74]:
# get ci for single system for table 2
# Same CI/overlap report again, but filtered only by corpus, so the
# comparison runs across all systems and semtype groups at once.
import pandas as pd
input_dir = '/Users/gms/development/nlp/nlpie/data/ensembling-u01/output/submission/'
file = 'single_system_summary_new.csv'
# change metric here
print('Single system significance within corpus by max metric and all groups across systems:')
corpora = ['fairview', 'i2b2', 'mipacq']
m_labels = ['F1', 'precision', 'recall']
for corpus in corpora:
    print('CORPUS:', corpus)
    for m_label in m_labels:
        df = pd.read_csv(input_dir + file)
        df = df[df['corpus']==corpus]
        metric = list()
        ci = list()
        # entire collection:
        for row in df.itertuples():
            #print(row.TP, row.FN, row.FP)
            tp = row.TP
            fn = row.FN
            recall_obs = tp + fn
            fp = row.FP
            precision_obs = tp + fp
            [r, dr, r_upper_bound, r_lower_bound] = normal_approximation_binomial_confidence_interval(tp, recall_obs)
            [p, dp, p_upper_bound, p_lower_bound] = normal_approximation_binomial_confidence_interval(tp, precision_obs)
            # NOTE(review): rebinds `df` (the DataFrame) to the F1 half-width;
            # harmless here because the frame is re-read at the top of each
            # m_label pass, but a distinct name would be safer.
            [f, df, f_upper_bound, f_lower_bound] = f1_score_confidence_interval(r, p, dr, dp)
            if m_label == 'F1':
                m = row.F1
                ci.append((round(f_upper_bound, 3),round(f_lower_bound, 3), row.system, row.corpus, row.semtypes, row.F1))
            elif m_label == 'precision':
                m = row.precision
                ci.append((round(p_upper_bound, 3),round(p_lower_bound, 3), row.system, row.corpus, row.semtypes, row.precision))
            elif m_label == 'recall':
                m = row.recall
                ci.append((round(r_upper_bound, 3),round(r_lower_bound, 3), row.system, row.corpus, row.semtypes, row.recall))
            metric.append(m)
        # SS for max F1
        M = max(metric)
        c_i = None
        for c in ci:
            if M == c[5]:
                c_i = (c[0], c[1])
        print('st max:', m_label, corpus)
        for c in ci:
            if (c_i[0] <= c[0] and c_i[1] > c[0]) or (c_i[0] >= c[0] and c_i[0] < c[1]):
                print(round(M, 3), c)
        ## SS wrt "All groups"
        c_i = None
        for c in ci:
            if 'all' == c[4]:
                c_i = (c[0], c[1])
        print('st all:')
        for c in ci:
            if (c_i[0] <= c[0] and c_i[1] > c[0]) or (c_i[0] >= c[0] and c_i[0] < c[1]):
                print(round(M, 3), c)
        print('-----------------')
        print('-----------------')
        print('-----------------')
        print('-----------------')
        print('-----------------')
# In[75]:
# NOTE: this read is immediately superseded inside the loop below; kept for parity.
df = pd.read_csv(input_dir + file)
semtypes = ['Anatomy',
            'Chemicals&Drugs',
            'Findings',
            'Procedures',
            'all']
m_labels = ['F1', 'precision', 'recall']
print('-----------------')
print('Single system significance across biased st:')
for s in semtypes:
    for m_label in m_labels:
        metric = list()  # observed metric value per system row
        ci = list()      # (upper, lower, system, corpus, semtypes, metric) per row
        # change metric here
        df = pd.read_csv(input_dir + file)
        df = df[df['semtypes'] == s]
        for row in df.itertuples():
            #print(row.TP, row.FN, row.FP)
            tp = row.TP
            fn = row.FN
            recall_obs = tp + fn
            fp = row.FP
            precision_obs = tp + fp
            [r, dr, r_upper_bound, r_lower_bound] = normal_approximation_binomial_confidence_interval(tp, recall_obs)
            [p, dp, p_upper_bound, p_lower_bound] = normal_approximation_binomial_confidence_interval(tp, precision_obs)
            # FIX: bind the F1 half-width to df1 (not df) so the DataFrame `df`
            # is not clobbered while it is being iterated.
            [f, df1, f_upper_bound, f_lower_bound] = f1_score_confidence_interval(r, p, dr, dp)
            if m_label == 'F1':
                m = row.F1
                ci.append((round(f_upper_bound, 3), round(f_lower_bound, 3), row.system, row.corpus, row.semtypes, row.F1))
            elif m_label == 'precision':
                m = row.precision
                ci.append((round(p_upper_bound, 3), round(p_lower_bound, 3), row.system, row.corpus, row.semtypes, row.precision))
            elif m_label == 'recall':
                m = row.recall
                ci.append((round(r_upper_bound, 3), round(r_lower_bound, 3), row.system, row.corpus, row.semtypes, row.recall))
            metric.append(m)
        # Report every system whose CI overlaps the best system's CI.
        M = max(metric)
        c_i = None
        for c in ci:
            if M == c[5]:
                c_i = (c[0], c[1])
        print('st max:', m_label, s)
        for c in ci:
            if (c_i[0] <= c[0] and c_i[1] > c[0]) or (c_i[0] >= c[0] and c_i[0] < c[1]):
                print(round(M, 3), c)
        print('-----------------')
print('-----------------')
print('-----------------')
print('-----------------')
print('-----------------')
print('Single system significance across st minus biased systems:')
for s in semtypes:
    for m_label in m_labels:
        metric = list()
        ci = list()
        df = pd.read_csv(input_dir + file)
        df = df[df['semtypes'] == s]
        for row in df.itertuples():
            #print(row.TP, row.FN, row.FP)
            tp = row.TP
            fn = row.FN
            recall_obs = tp + fn
            fp = row.FP
            precision_obs = tp + fp
            [r, dr, r_upper_bound, r_lower_bound] = normal_approximation_binomial_confidence_interval(tp, recall_obs)
            [p, dp, p_upper_bound, p_lower_bound] = normal_approximation_binomial_confidence_interval(tp, precision_obs)
            # FIX: df1 instead of df, as above.
            [f, df1, f_upper_bound, f_lower_bound] = f1_score_confidence_interval(r, p, dr, dp)
            # Keep only corpus/system pairs without a biased system:
            # excludes clamp on i2b2 and biomedicus/ctakes on mipacq.
            if (row.corpus == 'fairview') or (row.system != 'clamp' and row.corpus == 'i2b2') or (row.system not in ['biomedicus', 'ctakes'] and row.corpus == 'mipacq'):
                if m_label == 'F1':
                    m = row.F1
                    ci.append((round(f_upper_bound, 3), round(f_lower_bound, 3), row.system, row.corpus, row.semtypes, row.F1))
                elif m_label == 'precision':
                    m = row.precision
                    ci.append((round(p_upper_bound, 3), round(p_lower_bound, 3), row.system, row.corpus, row.semtypes, row.precision))
                elif m_label == 'recall':
                    m = row.recall
                    ci.append((round(r_upper_bound, 3), round(r_lower_bound, 3), row.system, row.corpus, row.semtypes, row.recall))
                metric.append(m)
        print(max(metric))  # debug-style echo of the best metric (kept from original)
        M = max(metric)
        c_i = None
        for c in ci:
            if M == c[5]:
                c_i = (c[0], c[1])
        print('st max:', m_label, s)
        for c in ci:
            if (c_i[0] <= c[0] and c_i[1] > c[0]) or (c_i[0] >= c[0] and c_i[0] < c[1]):
                print(round(M, 3), c)
        print('-----------------')
print('-----------------')
print('-----------------')
print('-----------------')
print('-----------------')
# In[76]:
# by corpus/semtype all ensembles, including single sys
# For every per-corpus/semtype CSV, compute CIs for each ensemble merge and
# print the merges whose CI overlaps the best merge's CI.
input_dir = '/Users/gms/development/nlp/nlpie/data/ensembling-u01/output/submission/overlap/combined/analysis/'
m_labels = ['F', 'precision', 'recall']
print('Within corpus/st ensembles:')
for file in glob.glob(input_dir + '*.csv'):
    df = pd.read_csv(file)
    df = df.drop_duplicates(subset=['F', 'precision', 'recall'])
    for m_label in m_labels:
        print(m_label,':', file)
        metric = list()  # observed metric value per merge row
        ci = list()      # (upper, lower, merge, metric) per row
        for row in df.itertuples():
            #print(row.TP, row.FN, row.FP)
            tp = row.TP
            fn = row.FN
            recall_obs = tp + fn        # recall denominator: TP + FN
            fp = row.FP
            precision_obs = tp + fp     # precision denominator: TP + FP
            [r, dr, r_upper_bound, r_lower_bound] = normal_approximation_binomial_confidence_interval(tp, recall_obs)
            [p, dp, p_upper_bound, p_lower_bound] = normal_approximation_binomial_confidence_interval(tp, precision_obs)
            [f, df1, f_upper_bound, f_lower_bound] = f1_score_confidence_interval(r, p, dr, dp)
            # Keep only merges without a system the file's corpus is biased to:
            # excludes clamp merges on i2b2 and biomedicus/ctakes merges on mipacq.
            if ('fairview' in file) or ('clamp' not in row.merge and 'i2b2' in file) or (('biomedicus' not in row.merge and 'ctakes' not in row.merge) and 'mipacq' in file):
                if m_label == 'F':
                    m = row.F
                    ci.append((round(f_upper_bound, 3),round(f_lower_bound, 3), row.merge, row.F))
                elif m_label == 'precision':
                    m = row.precision
                    ci.append((round(p_upper_bound, 3),round(p_lower_bound, 3), row.merge, row.precision))
                elif m_label == 'recall':
                    m = row.recall
                    ci.append((round(r_upper_bound, 3),round(r_lower_bound, 3), row.merge, row.recall))
                metric.append(m)
        # Best observed metric; report every merge whose CI overlaps its CI.
        M = max(metric)
        c_i = None
        for c in ci:
            if M == c[3]:
                c_i = (c[0], c[1])
        for c in ci:
            if (c_i[0] <= c[0] and c_i[1] > c[0]) or (c_i[0] >= c[0] and c_i[0] < c[1]):
                print(round(M, 3), c)
        print('--------------')
print('-----------------')
print('-----------------')
print('-----------------')
print('-----------------')
print('-----------------')
# In[78]:
# by max merges within corpus, across corpora(?)
# Significance of "max merge" ensembles within each corpus, read from the
# per-metric worksheets of an Excel summary workbook.
data_dir = '/Users/gms/development/nlp/nlpie/data/ensembling-u01/output/submission/'
file = 'max_merge_summary_new.xlsx'
corpora = ['fairview', 'i2b2', 'mipacq']
m_labels = ['F1', 'precision', 'recall']
print('Within corpus significance max merges:')
for corpus in corpora:
    print('CORPUS:', corpus)
    for m_label in m_labels:
        # pick the worksheet that holds this metric's maxima
        if m_label == 'F1':
            sheet_name='max F-score'
        elif m_label == 'precision':
            sheet_name='max precision'
        elif m_label == 'recall':
            sheet_name='max recall'
        df = pd.read_excel(open(data_dir + file, 'rb'), sheet_name=sheet_name)
        df = df[df['corpus'] == corpus]
        metric = list()  # observed metric value per merge row
        ci = list()      # (upper, lower, metric, merge, corpus, semtypes) per row
        # entire collection:
        for row in df.itertuples():
            tp = row.TP
            fn = row.FN
            recall_obs = tp + fn        # recall denominator: TP + FN
            fp = row.FP
            precision_obs = tp + fp     # precision denominator: TP + FP
            [r, dr, r_upper_bound, r_lower_bound] = normal_approximation_binomial_confidence_interval(tp, recall_obs)
            [p, dp, p_upper_bound, p_lower_bound] = normal_approximation_binomial_confidence_interval(tp, precision_obs)
            [f, df1, f_upper_bound, f_lower_bound] = f1_score_confidence_interval(r, p, dr, dp)
            if m_label == 'F1':
                m = row.F1
                ci.append((round(f_upper_bound, 3),round(f_lower_bound, 3), row.F1, row.merge, row.corpus, row.semtypes))
            elif m_label == 'precision':
                m = row.precision
                ci.append((round(p_upper_bound, 3),round(p_lower_bound, 3), row.precision, row.merge, row.corpus, row.semtypes))
            elif m_label == 'recall':
                m = row.recall
                ci.append((round(r_upper_bound, 3),round(r_lower_bound, 3), row.recall, row.merge, row.corpus, row.semtypes))
            metric.append(m)
        # Report every merge whose CI overlaps the best merge's CI.
        M = max(metric)
        c_i = None
        for c in ci:
            #print(c)
            if M == c[2]:
                c_i = (c[0], c[1])
        print('st max:', m_label, corpus)
        for c in ci:
            if (c_i[0] <= c[0] and c_i[1] > c[0]) or (c_i[0] >= c[0] and c_i[0] < c[1]):
                print(round(M, 3), c)
        ## SS wrt "All groups": overlap test against the 'all' semtype row.
        c_i = None
        for c in ci:
            if 'all' == c[5]:
                c_i = (c[0], c[1])
        print('st all:')
        for c in ci:
            # if c[0] <= F <= c[1]:
            if (c_i[0] <= c[0] and c_i[1] > c[0]) or (c_i[0] >= c[0] and c_i[0] < c[1]):
                print(round(M, 3), c)
        print('-----------------')
print('-----------------')
print('-----------------')
print('-----------------')
print('-----------------')
# In[64]:
# by max merges within corpus, across corpora(?)
# Same analysis as the previous cell, for the unbiased mipacq workbook only.
data_dir = '/Users/gms/development/nlp/nlpie/data/ensembling-u01/output/submission/'
file = 'max_merge_summary_new_mipacq.xlsx'
m_labels = ['F1', 'precision', 'recall']
print('Within corpus significance max merges unbiased mipacq:')
for m_label in m_labels:
    # pick the worksheet that holds this metric's maxima
    if m_label == 'F1':
        sheet_name='max F-score'
    elif m_label == 'precision':
        sheet_name='max precision'
    elif m_label == 'recall':
        sheet_name='max recall'
    df = pd.read_excel(open(data_dir + file, 'rb'), sheet_name=sheet_name)
    metric = list()  # observed metric value per merge row
    ci = list()      # (upper, lower, metric, merge, corpus, semtypes) per row
    # entire collection:
    for row in df.itertuples():
        tp = row.TP
        fn = row.FN
        recall_obs = tp + fn
        fp = row.FP
        precision_obs = tp + fp
        [r, dr, r_upper_bound, r_lower_bound] = normal_approximation_binomial_confidence_interval(tp, recall_obs)
        [p, dp, p_upper_bound, p_lower_bound] = normal_approximation_binomial_confidence_interval(tp, precision_obs)
        [f, df1, f_upper_bound, f_lower_bound] = f1_score_confidence_interval(r, p, dr, dp)
        if m_label == 'F1':
            m = row.F1
            ci.append((round(f_upper_bound, 3), round(f_lower_bound, 3), row.F1, row.merge, row.corpus, row.semtypes))
        elif m_label == 'precision':
            m = row.precision
            ci.append((round(p_upper_bound, 3), round(p_lower_bound, 3), row.precision, row.merge, row.corpus, row.semtypes))
        elif m_label == 'recall':
            m = row.recall
            ci.append((round(r_upper_bound, 3), round(r_lower_bound, 3), row.recall, row.merge, row.corpus, row.semtypes))
        metric.append(m)
    # Report every merge whose CI overlaps the best merge's CI.
    M = max(metric)
    c_i = None
    for c in ci:
        #print(c)
        if M == c[2]:
            c_i = (c[0], c[1])
    # FIX: this cell defines no `corpus` variable; the original printed the
    # stale `corpus` left over from a previous cell.  Label the output with
    # this workbook's corpus explicitly.
    print('st max:', m_label, 'mipacq')
    for c in ci:
        if (c_i[0] <= c[0] and c_i[1] > c[0]) or (c_i[0] >= c[0] and c_i[0] < c[1]):
            print(round(M, 3), c)
    ## SS wrt "All groups": overlap test against the 'all' semtype row.
    c_i = None
    for c in ci:
        if 'all' == c[5]:
            c_i = (c[0], c[1])
    print('st all:')
    for c in ci:
        # if c[0] <= F <= c[1]:
        if (c_i[0] <= c[0] and c_i[1] > c[0]) or (c_i[0] >= c[0] and c_i[0] < c[1]):
            print(round(M, 3), c)
    print('-----------------')
print('-----------------')
print('-----------------')
print('-----------------')
print('-----------------')
# In[79]:
# by max merges within corpus, across corpora(?)
# Same analysis as the previous cell, for the unbiased i2b2 workbook only.
data_dir = '/Users/gms/development/nlp/nlpie/data/ensembling-u01/output/submission/'
file = 'max_merge_summary_new_i2b2.xlsx'
m_labels = ['F1', 'precision', 'recall']
print('Within corpus significance max merges unbiased i2b2:')
for m_label in m_labels:
    # pick the worksheet that holds this metric's maxima
    if m_label == 'F1':
        sheet_name='max F-score'
    elif m_label == 'precision':
        sheet_name='max precision'
    elif m_label == 'recall':
        sheet_name='max recall'
    df = pd.read_excel(open(data_dir + file, 'rb'), sheet_name=sheet_name)
    metric = list()  # observed metric value per merge row
    ci = list()      # (upper, lower, metric, merge, corpus, semtypes) per row
    # entire collection:
    for row in df.itertuples():
        tp = row.TP
        fn = row.FN
        recall_obs = tp + fn
        fp = row.FP
        precision_obs = tp + fp
        [r, dr, r_upper_bound, r_lower_bound] = normal_approximation_binomial_confidence_interval(tp, recall_obs)
        [p, dp, p_upper_bound, p_lower_bound] = normal_approximation_binomial_confidence_interval(tp, precision_obs)
        [f, df1, f_upper_bound, f_lower_bound] = f1_score_confidence_interval(r, p, dr, dp)
        if m_label == 'F1':
            m = row.F1
            ci.append((round(f_upper_bound, 3), round(f_lower_bound, 3), row.F1, row.merge, row.corpus, row.semtypes))
        elif m_label == 'precision':
            m = row.precision
            ci.append((round(p_upper_bound, 3), round(p_lower_bound, 3), row.precision, row.merge, row.corpus, row.semtypes))
        elif m_label == 'recall':
            m = row.recall
            ci.append((round(r_upper_bound, 3), round(r_lower_bound, 3), row.recall, row.merge, row.corpus, row.semtypes))
        metric.append(m)
    # Report every merge whose CI overlaps the best merge's CI.
    M = max(metric)
    c_i = None
    for c in ci:
        #print(c)
        if M == c[2]:
            c_i = (c[0], c[1])
    # FIX: this cell defines no `corpus` variable; the original printed the
    # stale `corpus` from a previous cell ('mipacq'), mislabeling i2b2 output.
    print('st max:', m_label, 'i2b2')
    for c in ci:
        if (c_i[0] <= c[0] and c_i[1] > c[0]) or (c_i[0] >= c[0] and c_i[0] < c[1]):
            print(round(M, 3), c)
    ## SS wrt "All groups": overlap test against the 'all' semtype row.
    c_i = None
    for c in ci:
        if 'all' == c[5]:
            c_i = (c[0], c[1])
    print('st all:')
    for c in ci:
        # if c[0] <= F <= c[1]:
        if (c_i[0] <= c[0] and c_i[1] > c[0]) or (c_i[0] >= c[0] and c_i[0] < c[1]):
            print(round(M, 3), c)
    print('-----------------')
print('-----------------')
print('-----------------')
print('-----------------')
print('-----------------')
# In[26]:
# get var for single system
# Print F1, ensemble size (n_sys) and the CI half-width ("variance") for every
# corpus/system/semtype combination present in the summary CSV.
import pandas as pd
input_dir = '/Users/gms/development/nlp/nlpie/data/ensembling-u01/output/submission/'
file = 'single_system_summary_new.csv'
# change metric here
m_labels = ['F1']
corpora = ['fairview', 'i2b2', 'mipacq']
systems = ['biomedicus', 'clamp', 'ctakes', 'metamap', 'quick_umls']
semtypes = ['Anatomy',
            'Findings',
            'Chemicals&Drugs',
            'Procedures',
            'all']
print('Single system F1-score, n_sys and variance by corpus, semantic aggregation, and system:')
for corpus in corpora:
    for system in systems:
        for st in semtypes:
            #print('CORPUS:', corpus, st, system)
            # Filter the summary down to a single (corpus, semtype, system) cell.
            data = pd.read_csv(input_dir + file)
            data = data[data['corpus']==corpus]
            data = data[data['semtypes'] == st]
            data = data[data['system'] == system]
            if not data.empty:
                metric = list()
                ci = list()
                # entire collection:
                for row in data.itertuples():
                    tp = row.TP
                    fn = row.FN
                    recall_obs = tp + fn        # recall denominator: TP + FN
                    fp = row.FP
                    precision_obs = tp + fp     # precision denominator: TP + FP
                    [r, dr, r_upper_bound, r_lower_bound] = normal_approximation_binomial_confidence_interval(tp, recall_obs)
                    [p, dp, p_upper_bound, p_lower_bound] = normal_approximation_binomial_confidence_interval(tp, precision_obs)
                    [f, df, f_upper_bound, f_lower_bound] = f1_score_confidence_interval(r, p, dr, dp)
                    # distance from the F1 point estimate to each CI bound
                    var_lower = f - f_upper_bound
                    var_upper = f_lower_bound - f
                    # print(var_lower == var_upper)
                    # NOTE(review): exact float equality — only prints when the
                    # interval is perfectly symmetric about f; confirm intended.
                    if var_lower == var_upper:
                        var = var_lower
                        print(row.F1, row.n_sys, var, corpus, st, system)
                    # ci.append((round(f_upper_bound, 3),round(f_lower_bound, 3), row.system, row.corpus, row.semtypes, row.F1))
                    # # elif m_label == 'precision':
                    # # m = row.precision
                    # # ci.append((round(p_upper_bound, 3),round(p_lower_bound, 3), row.system, row.corpus, row.semtypes, row.precision))
                    # # elif m_label == 'recall':
                    # # m = row.recall
                    # # ci.append((round(r_upper_bound, 3),round(r_lower_bound, 3), row.system, row.corpus, row.semtypes, row.recall))
                    # metric.append(m)
                # # SS for max F1
                # M = max(metric)
                # c_i = None
                # for c in ci:
                # if M == c[5]:
                # c_i = (c[0], c[1])
                # print('st max:', m_label, corpus)
                # for c in ci:
                # if (c_i[0] <= c[0] and c_i[1] > c[0]) or (c_i[0] >= c[0] and c_i[0] < c[1]):
                # print(round(M, 3), c)
                # # ## SS wrt "All groups"
                # # c_i = None
                # # for c in ci:
                # # if 'all' == c[4]:
                # # c_i = (c[0], c[1])
                # # print('st all:')
                # # for c in ci:
                # # # if c[0] <= F <= c[1]:
                # # if (c_i[0] <= c[0] and c_i[1] > c[0]) or (c_i[0] >= c[0] and c_i[0] < c[1]):
                # # print(round(M, 3), c)
                # print('-----------------')
print('-----------------')
print('-----------------')
print('-----------------')
print('-----------------')
# In[ ]:
#
| 33.86169
| 173
| 0.535205
| 4,269
| 30,848
| 3.685406
| 0.074022
| 0.012966
| 0.009725
| 0.012966
| 0.810843
| 0.793746
| 0.784212
| 0.775504
| 0.759359
| 0.748363
| 0
| 0.025582
| 0.305595
| 30,848
| 910
| 174
| 33.898901
| 0.708884
| 0.139134
| 0
| 0.845343
| 0
| 0.015817
| 0.135274
| 0.037648
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003515
| false
| 0
| 0.056239
| 0
| 0.063269
| 0.16696
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b8e4403ad910c645f4d0f86ea0eacf955c55132d
| 115
|
py
|
Python
|
src/stk/molecular/topology_graphs/metal_complex/octahedral/__init__.py
|
andrewtarzia/stk
|
1ac2ecbb5c9940fe49ce04cbf5603fd7538c475a
|
[
"MIT"
] | 21
|
2018-04-12T16:25:24.000Z
|
2022-02-14T23:05:43.000Z
|
src/stk/molecular/topology_graphs/metal_complex/octahedral/__init__.py
|
JelfsMaterialsGroup/stk
|
0d3e1b0207aa6fa4d4d5ee8dfe3a29561abb08a2
|
[
"MIT"
] | 8
|
2019-03-19T12:36:36.000Z
|
2020-11-11T12:46:00.000Z
|
src/stk/molecular/topology_graphs/metal_complex/octahedral/__init__.py
|
supramolecular-toolkit/stk
|
0d3e1b0207aa6fa4d4d5ee8dfe3a29561abb08a2
|
[
"MIT"
] | 5
|
2018-08-07T13:00:16.000Z
|
2021-11-01T00:55:10.000Z
|
from .octahedral_lambda import * # noqa
from .octahedral_delta import * # noqa
from .octahedral import * # noqa
| 28.75
| 40
| 0.73913
| 14
| 115
| 5.928571
| 0.428571
| 0.506024
| 0.337349
| 0.578313
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.182609
| 115
| 3
| 41
| 38.333333
| 0.882979
| 0.121739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
77155e7461b467ec2b62aba6e9224451373f84cb
| 7,444
|
py
|
Python
|
tests/em/fdem/forward/test_FDEM_forwardHB.py
|
kimjaed/simpeg
|
b8d716f86a4ea07ba3085fabb24c2bc974788040
|
[
"MIT"
] | 3
|
2020-11-27T03:18:28.000Z
|
2022-03-18T01:29:58.000Z
|
tests/em/fdem/forward/test_FDEM_forwardHB.py
|
kimjaed/simpeg
|
b8d716f86a4ea07ba3085fabb24c2bc974788040
|
[
"MIT"
] | null | null | null |
tests/em/fdem/forward/test_FDEM_forwardHB.py
|
kimjaed/simpeg
|
b8d716f86a4ea07ba3085fabb24c2bc974788040
|
[
"MIT"
] | 1
|
2020-05-26T17:00:53.000Z
|
2020-05-26T17:00:53.000Z
|
import unittest
from SimPEG import EM
from scipy.constants import mu_0
from SimPEG.EM.Utils.testingUtils import getFDEMProblem, crossCheckTest
# Formulation-pair toggles; only testBH is consulted in the test class below.
# The other flags are presumably used elsewhere in the original suite — TODO confirm.
testEB = True
testHJ = True
testEJ = True
testBH = True
verbose = False
TOLEJHB = 1 # averaging and more sensitive to boundary condition violations (ie. the impact of violating the boundary conditions in each case is different.)
#TODO: choose better testing parameters to lower this
# Source types exercised by every cross-check call.
SrcList = ['RawVec', 'MagDipole_Bfield', 'MagDipole', 'CircularLoop']
class FDEM_CrossCheck(unittest.TestCase):
    """Cross-check FDEM field consistency between the 'b' and 'h' formulations.

    Each test asserts that ``crossCheckTest`` succeeds for one field component
    (j/e/b/h x x/y/z x real/imag) using the shared ``SrcList`` sources and the
    ``TOLEJHB`` tolerance.

    FIX: the original file contained this entire ``if testBH:`` block twice,
    verbatim; the duplicate redefined identical methods (the second definition
    simply shadowed the first) and added nothing, so it has been removed.
    """
    if testBH:
        def test_BH_CrossCheck_jxr(self):
            self.assertTrue(crossCheckTest(SrcList, 'b', 'h', 'jxr', verbose=verbose, TOL=TOLEJHB))
        def test_BH_CrossCheck_jyr(self):
            self.assertTrue(crossCheckTest(SrcList, 'b', 'h', 'jyr', verbose=verbose, TOL=TOLEJHB))
        def test_BH_CrossCheck_jzr(self):
            self.assertTrue(crossCheckTest(SrcList, 'b', 'h', 'jzr', verbose=verbose, TOL=TOLEJHB))
        def test_BH_CrossCheck_jxi(self):
            self.assertTrue(crossCheckTest(SrcList, 'b', 'h', 'jxi', verbose=verbose, TOL=TOLEJHB))
        def test_BH_CrossCheck_jyi(self):
            self.assertTrue(crossCheckTest(SrcList, 'b', 'h', 'jyi', verbose=verbose, TOL=TOLEJHB))
        def test_BH_CrossCheck_jzi(self):
            self.assertTrue(crossCheckTest(SrcList, 'b', 'h', 'jzi', verbose=verbose, TOL=TOLEJHB))
        def test_BH_CrossCheck_exr(self):
            self.assertTrue(crossCheckTest(SrcList, 'b', 'h', 'exr', verbose=verbose, TOL=TOLEJHB))
        def test_BH_CrossCheck_eyr(self):
            self.assertTrue(crossCheckTest(SrcList, 'b', 'h', 'eyr', verbose=verbose, TOL=TOLEJHB))
        def test_BH_CrossCheck_ezr(self):
            self.assertTrue(crossCheckTest(SrcList, 'b', 'h', 'ezr', verbose=verbose, TOL=TOLEJHB))
        def test_BH_CrossCheck_exi(self):
            self.assertTrue(crossCheckTest(SrcList, 'b', 'h', 'exi', verbose=verbose, TOL=TOLEJHB))
        def test_BH_CrossCheck_eyi(self):
            self.assertTrue(crossCheckTest(SrcList, 'b', 'h', 'eyi', verbose=verbose, TOL=TOLEJHB))
        def test_BH_CrossCheck_ezi(self):
            self.assertTrue(crossCheckTest(SrcList, 'b', 'h', 'ezi', verbose=verbose, TOL=TOLEJHB))
        def test_BH_CrossCheck_bxr(self):
            self.assertTrue(crossCheckTest(SrcList, 'b', 'h', 'bxr', verbose=verbose, TOL=TOLEJHB))
        def test_BH_CrossCheck_byr(self):
            self.assertTrue(crossCheckTest(SrcList, 'b', 'h', 'byr', verbose=verbose, TOL=TOLEJHB))
        def test_BH_CrossCheck_bzr(self):
            self.assertTrue(crossCheckTest(SrcList, 'b', 'h', 'bzr', verbose=verbose, TOL=TOLEJHB))
        def test_BH_CrossCheck_bxi(self):
            self.assertTrue(crossCheckTest(SrcList, 'b', 'h', 'bxi', verbose=verbose, TOL=TOLEJHB))
        def test_BH_CrossCheck_byi(self):
            self.assertTrue(crossCheckTest(SrcList, 'b', 'h', 'byi', verbose=verbose, TOL=TOLEJHB))
        def test_BH_CrossCheck_bzi(self):
            self.assertTrue(crossCheckTest(SrcList, 'b', 'h', 'bzi', verbose=verbose, TOL=TOLEJHB))
        def test_BH_CrossCheck_hxr(self):
            self.assertTrue(crossCheckTest(SrcList, 'b', 'h', 'hxr', verbose=verbose, TOL=TOLEJHB))
        def test_BH_CrossCheck_hyr(self):
            self.assertTrue(crossCheckTest(SrcList, 'b', 'h', 'hyr', verbose=verbose, TOL=TOLEJHB))
        def test_BH_CrossCheck_hzr(self):
            self.assertTrue(crossCheckTest(SrcList, 'b', 'h', 'hzr', verbose=verbose, TOL=TOLEJHB))
        def test_BH_CrossCheck_hxi(self):
            self.assertTrue(crossCheckTest(SrcList, 'b', 'h', 'hxi', verbose=verbose, TOL=TOLEJHB))
        def test_BH_CrossCheck_hyi(self):
            self.assertTrue(crossCheckTest(SrcList, 'b', 'h', 'hyi', verbose=verbose, TOL=TOLEJHB))
        def test_BH_CrossCheck_hzi(self):
            self.assertTrue(crossCheckTest(SrcList, 'b', 'h', 'hzi', verbose=verbose, TOL=TOLEJHB))
# Standard unittest entry point when this file is run as a script.
if __name__ == '__main__':
    unittest.main()
| 58.614173
| 156
| 0.663756
| 897
| 7,444
| 5.335563
| 0.102564
| 0.070205
| 0.090263
| 0.190556
| 0.906394
| 0.906394
| 0.906394
| 0.906394
| 0.906394
| 0.906394
| 0
| 0.000334
| 0.194653
| 7,444
| 126
| 157
| 59.079365
| 0.797998
| 0.026061
| 0
| 0.875
| 0
| 0
| 0.040155
| 0
| 0
| 0
| 0
| 0.007937
| 0.428571
| 1
| 0.428571
| false
| 0
| 0.035714
| 0
| 0.473214
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
91f0f39f30ddeeb103438b4c1e2068d0ce6faec6
| 17,954
|
py
|
Python
|
test/test_cursor.py
|
memgraph/pymgclient
|
eb92b7613716f65414dd2241d0721bf066433eb8
|
[
"Apache-2.0"
] | 35
|
2019-12-30T09:36:25.000Z
|
2022-03-16T01:21:27.000Z
|
test/test_cursor.py
|
memgraph/pymgclient
|
eb92b7613716f65414dd2241d0721bf066433eb8
|
[
"Apache-2.0"
] | 19
|
2021-04-15T11:25:33.000Z
|
2022-03-23T16:16:25.000Z
|
test/test_cursor.py
|
memgraph/pymgclient
|
eb92b7613716f65414dd2241d0721bf066433eb8
|
[
"Apache-2.0"
] | 2
|
2019-08-21T11:51:56.000Z
|
2021-07-17T18:40:58.000Z
|
# Copyright (c) 2016-2020 Memgraph Ltd. [https://memgraph.com]
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import mgclient
import pytest
from common import start_memgraph, Memgraph
@pytest.fixture(scope="function")
def memgraph_server():
    """Start a fresh Memgraph server for each test.

    Yields (host, port, sslmode, is_long_running); the server is killed on teardown.
    """
    memgraph = start_memgraph()
    yield memgraph.host, memgraph.port, memgraph.sslmode(), memgraph.is_long_running
    # teardown: stop the server once the test using this fixture finishes
    memgraph.kill()
def test_cursor_visibility(memgraph_server):
    """Writes made through one cursor are visible to another cursor on the same connection."""
    host, port, sslmode, is_long_running = memgraph_server
    conn = mgclient.connect(host=host, port=port, sslmode=sslmode)
    cursor1 = conn.cursor()
    cursor1.execute("MATCH (n) RETURN count(n)")
    original_count = cursor1.fetchall()[0][0]
    # a long-running server may already contain data; a fresh one must be empty
    assert is_long_running or original_count == 0
    cursor1.execute("CREATE (:Node)")
    cursor2 = conn.cursor()
    cursor2.execute("MATCH (n) RETURN count(n)")
    # the node created via cursor1 must be counted by cursor2
    assert cursor2.fetchall() == [(original_count + 1,)]
class TestCursorInRegularConnection:
def test_execute_closed_connection(self, memgraph_server):
host, port, sslmode, _ = memgraph_server
conn = mgclient.connect(host=host, port=port, sslmode=sslmode)
cursor = conn.cursor()
conn.close()
with pytest.raises(mgclient.InterfaceError):
cursor.execute("RETURN 100")
def test_cursor_close(self, memgraph_server):
host, port, sslmode, _ = memgraph_server
conn = mgclient.connect(host=host, port=port, sslmode=sslmode)
cursor = conn.cursor()
cursor.execute("UNWIND range(1, 10) AS n RETURN n")
cursor.close()
# closing again does nothing
cursor.close()
with pytest.raises(mgclient.InterfaceError):
cursor.fetchone()
with pytest.raises(mgclient.InterfaceError):
cursor.execute("RETURN 100")
with pytest.raises(mgclient.InterfaceError):
cursor.fetchmany()
with pytest.raises(mgclient.InterfaceError):
cursor.fetchall()
with pytest.raises(mgclient.InterfaceError):
cursor.setinputsizes([])
with pytest.raises(mgclient.InterfaceError):
cursor.setoutputsizes(100)
def test_cursor_fetchone(self, memgraph_server):
host, port, sslmode, _ = memgraph_server
conn = mgclient.connect(host=host, port=port, sslmode=sslmode)
cursor = conn.cursor()
with pytest.raises(mgclient.InterfaceError):
cursor.fetchone()
cursor.execute("UNWIND range(1, 10) AS n RETURN n")
for n in range(1, 11):
assert cursor.fetchone() == (n,)
assert cursor.fetchone() is None
assert cursor.fetchone() is None
cursor.execute("RETURN 100")
assert cursor.fetchone() == (100,)
assert cursor.fetchone() is None
def test_cursor_fetchmany(self, memgraph_server):
host, port, sslmode, _ = memgraph_server
conn = mgclient.connect(host=host, port=port, sslmode=sslmode)
cursor = conn.cursor()
with pytest.raises(mgclient.InterfaceError):
cursor.fetchmany()
cursor.execute("UNWIND range(1, 10) AS n RETURN n")
with pytest.raises(OverflowError):
cursor.fetchmany(10 ** 100)
assert cursor.fetchmany() == [(1,)]
cursor.arraysize = 4
assert cursor.fetchmany() == [(2,), (3,), (4,), (5,)]
assert cursor.fetchmany() == [(6,), (7,), (8,), (9,)]
assert cursor.fetchmany() == [(10,)]
assert cursor.fetchmany() == []
assert cursor.fetchone() is None
cursor.execute("RETURN 100")
assert cursor.fetchmany() == [(100,)]
assert cursor.fetchmany() == []
assert cursor.fetchone() is None
def test_cursor_fetchall(self, memgraph_server):
host, port, sslmode, _ = memgraph_server
conn = mgclient.connect(host=host, port=port, sslmode=sslmode)
cursor = conn.cursor()
with pytest.raises(mgclient.InterfaceError):
cursor.fetchall()
cursor.execute("UNWIND range(1, 10) AS n RETURN n")
assert cursor.fetchall() == [(n,) for n in range(1, 11)]
assert cursor.fetchall() == []
assert cursor.fetchone() is None
cursor.execute("RETURN 100")
assert cursor.fetchall() == [(100,)]
assert cursor.fetchall() == []
assert cursor.fetchone() is None
def test_cursor_multiple_queries(self, memgraph_server):
host, port, sslmode, _ = memgraph_server
conn = mgclient.connect(host=host, port=port, sslmode=sslmode)
cursor1 = conn.cursor()
cursor2 = conn.cursor()
cursor1.execute("UNWIND range(1, 10) AS n RETURN n")
cursor2.execute("UNWIND range(1, 10) AS n RETURN n")
for n in range(1, 11):
assert cursor1.fetchone() == (n,)
assert cursor2.fetchone() == (n,)
def test_cursor_syntax_error(self, memgraph_server):
host, port, sslmode, _ = memgraph_server
conn = mgclient.connect(host=host, port=port, sslmode=sslmode)
cursor = conn.cursor()
cursor.execute("RETURN 100")
with pytest.raises(mgclient.DatabaseError):
cursor.execute("fjdkalfjdsalfaj")
with pytest.raises(mgclient.InterfaceError):
cursor.fetchall()
def test_cursor_runtime_error(self, memgraph_server):
host, port, sslmode, _ = memgraph_server
conn = mgclient.connect(host=host, port=port, sslmode=sslmode)
cursor = conn.cursor()
cursor.execute("RETURN 100")
with pytest.raises(mgclient.DatabaseError):
cursor.execute("UNWIND [true, true, false] AS p RETURN assert(p)")
cursor.fetchall()
cursor.execute("RETURN 200")
assert cursor.fetchall() == [(200,)]
def test_cursor_description(self, memgraph_server):
host, port, sslmode, _ = memgraph_server
conn = mgclient.connect(host=host, port=port, sslmode=sslmode)
cursor = conn.cursor()
cursor.execute("RETURN 5 AS x, 6 AS y")
assert len(cursor.description) == 2
assert cursor.description[0].name == "x"
assert cursor.description[1].name == "y"
with pytest.raises(mgclient.DatabaseError):
cursor.execute("jdfklfjkdalfja")
assert cursor.description is None
def test_cursor_fetchone_without_result(self, memgraph_server):
host, port, sslmode, _ = memgraph_server
conn = mgclient.connect(host=host, port=port, sslmode=sslmode)
cursor = conn.cursor()
cursor.execute("MATCH (n:NonExistingLabel) RETURN n")
result = cursor.fetchone()
assert result is None
def test_cursor_fetchmany_without_result(self, memgraph_server):
host, port, sslmode, _ = memgraph_server
conn = mgclient.connect(host=host, port=port, sslmode=sslmode)
cursor = conn.cursor()
cursor.execute("MATCH (n:NonExistingLabel) RETURN n")
assert cursor.fetchmany() == []
def test_cursor_result_ref_counts(self, memgraph_server):
    """On a non-lazy connection the cursor buffers rows internally
    (cursor->rows in the C extension), so individual row tuples carry an
    extra reference compared to the lazy connection variant.

    NOTE(review): the exact counts assume the CPython C-extension
    implementation of mgclient; they are implementation details, not
    DB-API guarantees.
    """
    host, port, sslmode, _ = memgraph_server
    conn = mgclient.connect(host=host, port=port, sslmode=sslmode)
    cursor = conn.cursor()
    cursor.execute("UNWIND [1, 2, 3, 4, 5] AS n RETURN n")
    fetchone_result = cursor.fetchone()
    # Refs are the following:
    # 1. fetchone_result
    # 2. temp reference in sys.getrefcount
    # 3. cursor->rows
    assert sys.getrefcount(fetchone_result) == 3
    fetchmany_result = cursor.fetchmany(2)
    # Refs are the following:
    # 1. fetchmany_result
    # 2. temp reference in sys.getrefcount
    assert sys.getrefcount(fetchmany_result) == 2
    row1 = fetchmany_result[0]
    row2 = fetchmany_result[1]
    del fetchmany_result
    # Refs are the following:
    # 1. row{1,2}
    # 2. temp reference in sys.getrefcount
    # 3. cursor->rows
    assert sys.getrefcount(row1) == 3
    assert sys.getrefcount(row2) == 3
    fetchall_result = cursor.fetchall()
    # Refs are the following:
    # 1. fetchall_result
    # 2. temp reference in sys.getrefcount
    assert sys.getrefcount(fetchall_result) == 2
    row1 = fetchall_result[0]
    row2 = fetchall_result[1]
    del fetchall_result
    # Refs are the following:
    # 1. row{1,2}
    # 2. temp reference in sys.getrefcount
    # 3. cursor->rows
    assert sys.getrefcount(row1) == 3
    assert sys.getrefcount(row2) == 3
class TestCursorInAsyncConnection:
    """Cursor behaviour on a lazy (asynchronous) mgclient connection.

    With lazy=True results are streamed: only one cursor may have an open
    result set at a time, the server pulls one record ahead of the client
    (to know whether more records exist), and rows are not buffered in
    the cursor.  Several assertions below document the consequences of
    that pull-ahead behaviour.
    """

    def test_cursor_close(self, memgraph_server):
        """A streaming cursor cannot be closed until its result set is
        fully consumed; once closed, every operation raises
        InterfaceError."""
        host, port, sslmode, _ = memgraph_server
        conn = mgclient.connect(host=host, port=port, lazy=True, sslmode=sslmode)
        cursor = conn.cursor()
        cursor.execute("UNWIND range(1, 10) AS n RETURN n")
        cursor2 = conn.cursor()
        # cursor still has pending results -> cannot close it yet.
        with pytest.raises(mgclient.InterfaceError):
            cursor.close()
        # cursor2 never executed anything, so it closes fine.
        cursor2.close()
        # NOTE: This here is a bit strange again because of double fetch /
        # server ahead of time pull because of the need for has_more info. As
        # soon as the last record is returned, the cursor will become
        # closeable.
        assert cursor.fetchmany(9) == [(n,) for n in range(1, 10)]
        with pytest.raises(mgclient.InterfaceError):
            cursor.close()
        assert cursor.fetchone() == (10,)
        assert cursor.fetchone() is None
        cursor.close()
        # closing again does nothing
        cursor.close()
        # Every operation on a closed cursor raises InterfaceError.
        with pytest.raises(mgclient.InterfaceError):
            cursor.fetchone()
        with pytest.raises(mgclient.InterfaceError):
            cursor.execute("RETURN 100")
        with pytest.raises(mgclient.InterfaceError):
            cursor.fetchmany()
        with pytest.raises(mgclient.InterfaceError):
            cursor.fetchall()
        with pytest.raises(mgclient.InterfaceError):
            cursor.setinputsizes([])
        with pytest.raises(mgclient.InterfaceError):
            cursor.setoutputsizes(100)

    def test_cursor_multiple_queries(self, memgraph_server):
        """Only one cursor may stream results at a time on a lazy
        connection; a second execute() raises InterfaceError."""
        host, port, sslmode, _ = memgraph_server
        conn = mgclient.connect(host=host, port=port, lazy=True, sslmode=sslmode)
        cursor1 = conn.cursor()
        cursor2 = conn.cursor()
        cursor1.execute("UNWIND range(1, 10) AS n RETURN n")
        with pytest.raises(mgclient.InterfaceError):
            cursor2.execute("UNWIND range(1, 10) AS n RETURN n")
        assert cursor1.fetchall() == [(n,) for n in range(1, 11)]
        with pytest.raises(mgclient.InterfaceError):
            cursor2.fetchall()

    def test_cursor_fetchone(self, memgraph_server):
        """fetchone() streams rows one at a time and returns None after
        the result set is exhausted."""
        host, port, sslmode, _ = memgraph_server
        conn = mgclient.connect(host=host, port=port, lazy=True, sslmode=sslmode)
        cursor = conn.cursor()
        # fetch before execute is an interface error.
        with pytest.raises(mgclient.InterfaceError):
            cursor.fetchone()
        cursor.execute("UNWIND range(1, 10) AS n RETURN n")
        for n in range(1, 11):
            assert cursor.fetchone() == (n,)
        assert cursor.fetchone() is None
        assert cursor.fetchone() is None
        cursor.execute("RETURN 100")
        assert cursor.fetchone() == (100,)
        assert cursor.fetchone() is None

    def test_cursor_fetchmany(self, memgraph_server):
        """fetchmany() honours arraysize, rejects absurd sizes, and
        returns [] once the stream is exhausted."""
        host, port, sslmode, _ = memgraph_server
        conn = mgclient.connect(host=host, port=port, lazy=True, sslmode=sslmode)
        cursor = conn.cursor()
        with pytest.raises(mgclient.InterfaceError):
            cursor.fetchmany()
        cursor.execute("UNWIND range(1, 10) AS n RETURN n")
        # size must fit into a C integer.
        with pytest.raises(OverflowError):
            cursor.fetchmany(10 ** 100)
        assert cursor.fetchmany() == [(1,)]
        cursor.arraysize = 4
        assert cursor.fetchmany() == [(2,), (3,), (4,), (5,)]
        assert cursor.fetchmany() == [(6,), (7,), (8,), (9,)]
        assert cursor.fetchmany() == [(10,)]
        assert cursor.fetchmany() == []
        assert cursor.fetchone() is None
        cursor.execute("RETURN 100")
        assert cursor.fetchmany() == [(100,)]
        assert cursor.fetchmany() == []
        assert cursor.fetchone() is None

    def test_cursor_fetchall(self, memgraph_server):
        """fetchall() drains the stream; subsequent calls return []."""
        host, port, sslmode, _ = memgraph_server
        conn = mgclient.connect(host=host, port=port, lazy=True, sslmode=sslmode)
        cursor = conn.cursor()
        with pytest.raises(mgclient.InterfaceError):
            cursor.fetchall()
        cursor.execute("UNWIND range(1, 10) AS n RETURN n")
        assert cursor.fetchall() == [(n,) for n in range(1, 11)]
        assert cursor.fetchall() == []
        assert cursor.fetchone() is None
        cursor.execute("RETURN 100")
        assert cursor.fetchall() == [(100,)]
        assert cursor.fetchall() == []
        assert cursor.fetchone() is None

    def test_cursor_syntax_error(self, memgraph_server):
        """Invalid query text raises DatabaseError; the result set is
        then gone (fetchall() raises InterfaceError)."""
        host, port, sslmode, _ = memgraph_server
        conn = mgclient.connect(host=host, port=port, lazy=True, sslmode=sslmode)
        cursor = conn.cursor()
        cursor.execute("RETURN 100")
        cursor.fetchall()
        with pytest.raises(mgclient.DatabaseError):
            cursor.execute("fjdkalfjdsalfaj")
        with pytest.raises(mgclient.InterfaceError):
            cursor.fetchall()

    def test_cursor_runtime_error(self, memgraph_server):
        """On a lazy connection a runtime failure surfaces during the
        fetch* call that observes it, one record earlier than naively
        expected because of the server's pull-ahead."""
        host, port, sslmode, _ = memgraph_server
        conn = mgclient.connect(host=host, port=port, lazy=True, sslmode=sslmode)
        cursor = conn.cursor()
        cursor.execute("RETURN 100")
        assert cursor.fetchall() == [(100,)]
        cursor.execute("UNWIND [true, true, false] AS p RETURN assert(p)")
        with pytest.raises(mgclient.DatabaseError):
            assert cursor.fetchone() == (True,)
            # NOTE: The exception is going to happen here which is unexpected.
            # The reason for that is because server pulls one more result ahead
            # of time to know are there more results.
            assert cursor.fetchone() == (True,)  # <- HERE
            cursor.fetchone()
        cursor.execute("UNWIND [true, true, false] AS p RETURN assert(p)")
        with pytest.raises(mgclient.DatabaseError):
            cursor.fetchmany(5)
        cursor.execute("UNWIND [true, true, false] AS p RETURN assert(p)")
        with pytest.raises(mgclient.DatabaseError):
            cursor.fetchall()

    def test_cursor_description(self, memgraph_server):
        """description stays stable while fetching and resets to None
        after a failed execute()."""
        host, port, sslmode, _ = memgraph_server
        conn = mgclient.connect(host=host, port=port, lazy=True, sslmode=sslmode)
        cursor = conn.cursor()
        cursor.execute("RETURN 5 AS x, 6 AS y")
        assert len(cursor.description) == 2
        assert cursor.description[0].name == "x"
        assert cursor.description[1].name == "y"
        cursor.fetchone()
        assert len(cursor.description) == 2
        assert cursor.description[0].name == "x"
        assert cursor.description[1].name == "y"
        cursor.fetchone()
        with pytest.raises(mgclient.DatabaseError):
            cursor.execute("jdfklfjkdalfja")
        assert cursor.description is None

    def test_cursor_fetchone_without_result(self, memgraph_server):
        """fetchone() on a query that matched nothing returns None."""
        host, port, sslmode, _ = memgraph_server
        conn = mgclient.connect(host=host, port=port, lazy=True, sslmode=sslmode)
        cursor = conn.cursor()
        cursor.execute("MATCH (n:NonExistingLabel) RETURN n")
        result = cursor.fetchone()
        assert result is None

    def test_cursor_fetchmany_without_result(self, memgraph_server):
        """fetchmany() on a query that matched nothing returns []."""
        host, port, sslmode, _ = memgraph_server
        conn = mgclient.connect(host=host, port=port, lazy=True, sslmode=sslmode)
        cursor = conn.cursor()
        cursor.execute("MATCH (n:NonExistingLabel) RETURN n")
        assert cursor.fetchmany() == []

    def test_cursor_result_ref_counts(self, memgraph_server):
        """On a lazy connection rows are not buffered in the cursor, so
        row tuples have one reference fewer than in the sync variant.

        NOTE(review): the exact counts assume the CPython C-extension
        implementation of mgclient; they are implementation details.
        """
        host, port, sslmode, _ = memgraph_server
        conn = mgclient.connect(host=host, port=port, lazy=True, sslmode=sslmode)
        cursor = conn.cursor()
        cursor.execute("UNWIND [1, 2, 3, 4, 5] AS n RETURN n")
        fetchone_result = cursor.fetchone()
        # Refs are the following:
        # 1. fetchone_result
        # 2. temp reference in sys.getrefcount
        assert sys.getrefcount(fetchone_result) == 2
        fetchmany_result = cursor.fetchmany(2)
        # Refs are the following:
        # 1. fetchmany_result
        # 2. temp reference in sys.getrefcount
        assert sys.getrefcount(fetchmany_result) == 2
        row1 = fetchmany_result[0]
        row2 = fetchmany_result[1]
        del fetchmany_result
        # Refs are the following:
        # 1. row{1,2}
        # 2. temp reference in sys.getrefcount
        assert sys.getrefcount(row1) == 2
        assert sys.getrefcount(row2) == 2
        fetchall_result = cursor.fetchall()
        # Refs are the following:
        # 1. fetchall_result
        # 2. temp reference in sys.getrefcount
        assert sys.getrefcount(fetchall_result) == 2
        row1 = fetchall_result[0]
        row2 = fetchall_result[1]
        del fetchall_result
        # Refs are the following:
        # 1. row{1,2}
        # 2. temp reference in sys.getrefcount
        assert sys.getrefcount(row1) == 2
        assert sys.getrefcount(row2) == 2
| 33.684803
| 84
| 0.628718
| 2,073
| 17,954
| 5.357935
| 0.097443
| 0.061583
| 0.050419
| 0.071306
| 0.86531
| 0.858198
| 0.842892
| 0.83758
| 0.828577
| 0.823175
| 0
| 0.022433
| 0.260109
| 17,954
| 532
| 85
| 33.74812
| 0.813686
| 0.10254
| 0
| 0.901198
| 0
| 0
| 0.069631
| 0
| 0
| 0
| 0
| 0
| 0.254491
| 1
| 0.07485
| false
| 0
| 0.011976
| 0
| 0.092814
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6264074de4bd0d3722084b3af32a37261a78ae63
| 151
|
py
|
Python
|
roll.py
|
rec/pickett
|
e6f890f13a39d439dfc778df2a23829f86eb945b
|
[
"Artistic-2.0"
] | 2
|
2019-05-26T15:11:25.000Z
|
2019-06-15T10:18:35.000Z
|
roll.py
|
rec/pickett
|
e6f890f13a39d439dfc778df2a23829f86eb945b
|
[
"Artistic-2.0"
] | null | null | null |
roll.py
|
rec/pickett
|
e6f890f13a39d439dfc778df2a23829f86eb945b
|
[
"Artistic-2.0"
] | null | null | null |
import random


def roll3d6():
    """Return the sum of three six-sided dice rolls, in [3, 18].

    Bug fix: ``random.choice`` takes a sequence, so ``random.choice(1, 6)``
    raised ``TypeError``.  ``random.randint(1, 6)`` draws an integer from
    the inclusive range [1, 6], which is what a d6 roll needs.
    """
    return random.randint(1, 6) + random.randint(1, 6) + random.randint(1, 6)


def rolld100():
    """Return a single percentile-die roll, in [1, 100] (same fix)."""
    return random.randint(1, 100)
| 18.875
| 72
| 0.682119
| 24
| 151
| 4.291667
| 0.416667
| 0.466019
| 0.504854
| 0.407767
| 0.407767
| 0.407767
| 0.407767
| 0.407767
| 0
| 0
| 0
| 0.11811
| 0.15894
| 151
| 7
| 73
| 21.571429
| 0.692913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| true
| 0
| 0.2
| 0.4
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
6273a2e2d0c2fe79848df2f0f870453bad8ad477
| 165
|
py
|
Python
|
qppwg/utils/__init__.py
|
entn-at/QPPWG
|
fa54a75071e43f7a0233debd6057a3be65cda276
|
[
"MIT"
] | 46
|
2020-05-22T05:58:42.000Z
|
2021-11-25T11:56:07.000Z
|
qppwg/utils/__init__.py
|
entn-at/QPPWG
|
fa54a75071e43f7a0233debd6057a3be65cda276
|
[
"MIT"
] | 5
|
2020-11-04T12:48:45.000Z
|
2021-06-02T06:08:22.000Z
|
qppwg/utils/__init__.py
|
entn-at/QPPWG
|
fa54a75071e43f7a0233debd6057a3be65cda276
|
[
"MIT"
] | 6
|
2020-05-22T12:17:36.000Z
|
2021-06-06T14:03:55.000Z
|
from qppwg.utils.utils import * # NOQA
from qppwg.utils.filters import * # NOQA
from qppwg.utils.features import * # NOQA
from qppwg.utils.index import * # NOQA
| 33
| 42
| 0.733333
| 24
| 165
| 5.041667
| 0.333333
| 0.297521
| 0.46281
| 0.471074
| 0.595041
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169697
| 165
| 4
| 43
| 41.25
| 0.883212
| 0.115152
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
65d3b4ed587b183113f4ca81e599c913baeb49a9
| 697
|
py
|
Python
|
exceptions.py
|
Tang142857/MyEditor
|
2d532eecfa6c48719cf6db99495a910ddd0ff52c
|
[
"MulanPSL-1.0"
] | null | null | null |
exceptions.py
|
Tang142857/MyEditor
|
2d532eecfa6c48719cf6db99495a910ddd0ff52c
|
[
"MulanPSL-1.0"
] | null | null | null |
exceptions.py
|
Tang142857/MyEditor
|
2d532eecfa6c48719cf6db99495a910ddd0ff52c
|
[
"MulanPSL-1.0"
] | null | null | null |
"""
TextbookChecker
this exceptions file include all exception in apply.py
@author: Tang142857
Copyright(c) DFSA Software Develop Center
"""
class CloseFileException(BaseException):
    """Raised when closing a file fails.

    NOTE(review): inheriting from BaseException means a plain
    ``except Exception`` will NOT catch this; kept unchanged because
    existing handlers may rely on it, but ``Exception`` is the
    conventional base.
    """

    def __init__(self, message):
        # Forward the message to BaseException so it also appears in
        # ``args`` and in tracebacks (previously it was dropped there).
        super().__init__(message)
        self.message = message

    def __str__(self):
        return self.message
class OpenFileException(BaseException):
    """Raised when opening a file fails.

    NOTE(review): inherits from BaseException (not Exception); kept
    unchanged because existing handlers may rely on it.
    """

    def __init__(self, message):
        # Forward the message to BaseException so it also appears in
        # ``args`` and in tracebacks (previously it was dropped there).
        super().__init__(message)
        self.message = message

    def __str__(self):
        return self.message
class SaveFileException(BaseException):
    """Raised when saving a file fails.

    NOTE(review): inherits from BaseException (not Exception); kept
    unchanged because existing handlers may rely on it.
    """

    def __init__(self, message):
        # Forward the message to BaseException so it also appears in
        # ``args`` and in tracebacks (previously it was dropped there).
        super().__init__(message)
        self.message = message

    def __str__(self):
        return self.message
| 20.5
| 54
| 0.674319
| 72
| 697
| 6.027778
| 0.416667
| 0.228111
| 0.207373
| 0.165899
| 0.610599
| 0.610599
| 0.610599
| 0.610599
| 0.610599
| 0.610599
| 0
| 0.011215
| 0.232425
| 697
| 34
| 55
| 20.5
| 0.8
| 0.190818
| 0
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.166667
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
65d597070ad22cc958450e11cf3e2f8eae3d2cb3
| 119
|
py
|
Python
|
pettingzoo/butterfly/cooperative_pong_v5.py
|
RedTachyon/PettingZoo
|
0c4be0ca0de5a11bf8eff3f7b87976edcacd093e
|
[
"Apache-2.0"
] | 1
|
2022-01-19T17:50:55.000Z
|
2022-01-19T17:50:55.000Z
|
pettingzoo/butterfly/cooperative_pong_v5.py
|
RedTachyon/PettingZoo
|
0c4be0ca0de5a11bf8eff3f7b87976edcacd093e
|
[
"Apache-2.0"
] | null | null | null |
pettingzoo/butterfly/cooperative_pong_v5.py
|
RedTachyon/PettingZoo
|
0c4be0ca0de5a11bf8eff3f7b87976edcacd093e
|
[
"Apache-2.0"
] | null | null | null |
from .cooperative_pong import manual_control
from .cooperative_pong.cooperative_pong import env, parallel_env, raw_env
| 39.666667
| 73
| 0.87395
| 17
| 119
| 5.764706
| 0.529412
| 0.459184
| 0.387755
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084034
| 119
| 2
| 74
| 59.5
| 0.899083
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
02cc00f9edba3aacf766e5f3ac023d98e8e3b9fd
| 44
|
py
|
Python
|
d3rlpy/metrics/__init__.py
|
jamartinh/d3rlpy
|
87f478451674ef769eb8ce74e3663c4d3b1c325d
|
[
"MIT"
] | null | null | null |
d3rlpy/metrics/__init__.py
|
jamartinh/d3rlpy
|
87f478451674ef769eb8ce74e3663c4d3b1c325d
|
[
"MIT"
] | 1
|
2020-11-17T22:35:50.000Z
|
2020-11-17T22:35:50.000Z
|
d3rlpy/metrics/__init__.py
|
jamartinh/d3rlpy
|
87f478451674ef769eb8ce74e3663c4d3b1c325d
|
[
"MIT"
] | null | null | null |
from . import scorer
from . import comparer
| 14.666667
| 22
| 0.772727
| 6
| 44
| 5.666667
| 0.666667
| 0.588235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 44
| 2
| 23
| 22
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f3075adf36e56097b45ef66bb207cc8c53ad4c72
| 135
|
py
|
Python
|
kore/components/factories.py
|
p1c2u/kore
|
5959afc331a13ad18a5e697a1d69e76d71576f86
|
[
"Apache-2.0"
] | 3
|
2017-03-14T10:54:57.000Z
|
2018-05-07T13:50:59.000Z
|
kore/components/factories.py
|
p1c2u/kore
|
5959afc331a13ad18a5e697a1d69e76d71576f86
|
[
"Apache-2.0"
] | 8
|
2017-03-14T10:52:07.000Z
|
2017-09-10T21:26:28.000Z
|
kore/components/factories.py
|
p1c2u/kore
|
5959afc331a13ad18a5e697a1d69e76d71576f86
|
[
"Apache-2.0"
] | null | null | null |
class ComponentFactory(object):
    """Factory that instantiates component classes bound to a namespace."""

    def create(self, component_class, namespace):
        """Instantiate *component_class*, passing *namespace* as a
        keyword argument, and return the new component."""
        component = component_class(namespace=namespace)
        return component
| 27
| 51
| 0.762963
| 14
| 135
| 7.214286
| 0.642857
| 0.277228
| 0.455446
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155556
| 135
| 4
| 52
| 33.75
| 0.885965
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
b845f72d3d723d2a71617d783360a9153db2e4f0
| 1,719
|
py
|
Python
|
LeetCode/Problems/5. Longest Palindromic Substring.py
|
nikku1234/Code-Practise
|
94eb6680ea36efd10856c377000219285f77e5a4
|
[
"Apache-2.0"
] | 9
|
2020-07-02T06:06:17.000Z
|
2022-02-26T11:08:09.000Z
|
LeetCode/Problems/5. Longest Palindromic Substring.py
|
nikku1234/Code-Practise
|
94eb6680ea36efd10856c377000219285f77e5a4
|
[
"Apache-2.0"
] | 1
|
2021-11-04T17:26:36.000Z
|
2021-11-04T17:26:36.000Z
|
LeetCode/Problems/5. Longest Palindromic Substring.py
|
nikku1234/Code-Practise
|
94eb6680ea36efd10856c377000219285f77e5a4
|
[
"Apache-2.0"
] | 8
|
2021-01-31T10:31:12.000Z
|
2022-03-13T09:15:55.000Z
|
class Solution(object):
    def longestPalindrome(self, s):
        """
        :type s: str
        :rtype: str

        Expand-around-center: for each index try an odd-length center
        (i, i) and an even-length center (i, i + 1) and keep the widest
        palindromic span.  Time O(n^2), space O(1).

        Fix: ``xrange`` is Python 2 only and raises NameError on
        Python 3; replaced with ``range``.
        """

        def longest_index(s, l, r):
            # Grow [l, r] outward while it remains a palindrome; return
            # (length, left_index, right_index) of the widest expansion.
            while l >= 0 and r < len(s) and s[l] == s[r]:
                l -= 1
                r += 1
            l += 1
            r -= 1
            return (r - l + 1, l, r)

        longest = 0
        left = 0
        right = -1  # so s[left:right + 1] == "" for empty input
        for i in range(len(s)):
            # odd-length center
            length, l, r = longest_index(s, i, i)
            if length > longest:
                longest = length
                left = l
                right = r
            # even-length center
            length, l, r = longest_index(s, i, i + 1)
            if length > longest:
                longest = length
                left = l
                right = r
        return s[left:right + 1]
# Both centers (odd and even) checked using an inner loop over j in {0, 1}.
class Solution(object):
    def longestPalindrome(self, s):
        """
        :type s: str
        :rtype: str
        time : O(n^2)
        space : O(1)

        Same expand-around-center algorithm as above, with the odd/even
        center cases folded into a loop over j in {0, 1}.

        Fix: ``xrange`` is Python 2 only and raises NameError on
        Python 3; replaced with ``range``.
        """

        def longest_index(s, l, r):
            # Grow [l, r] outward while it remains a palindrome; return
            # (length, left_index, right_index) of the widest expansion.
            while l >= 0 and r < len(s) and s[l] == s[r]:
                l -= 1
                r += 1
            l += 1
            r -= 1
            return (r - l + 1, l, r)

        longest = 0
        left = 0
        right = -1  # so s[left:right + 1] == "" for empty input
        for i in range(len(s)):
            for j in range(2):  # j == 0: odd center, j == 1: even center
                length, l, r = longest_index(s, i, i + j)
                if length > longest:
                    longest = length
                    left = l
                    right = r
        return s[left:right + 1]
| 24.211268
| 57
| 0.400814
| 210
| 1,719
| 3.261905
| 0.219048
| 0.020438
| 0.017518
| 0.023358
| 0.872993
| 0.872993
| 0.872993
| 0.839416
| 0.760584
| 0.70365
| 0
| 0.027618
| 0.494474
| 1,719
| 70
| 58
| 24.557143
| 0.760644
| 0.133799
| 0
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0
| 0
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b871aeb2aefc8cd1e95b05393454407dcbd1acd1
| 110
|
py
|
Python
|
sktps/ps/task.py
|
jclee81/sktacc
|
6f601ce8f61b4e361b17773060ee2544bf35dbe4
|
[
"Apache-2.0"
] | 2
|
2017-08-03T06:03:25.000Z
|
2017-08-10T08:55:22.000Z
|
sktps/ps/task.py
|
jclee81/sktacc
|
6f601ce8f61b4e361b17773060ee2544bf35dbe4
|
[
"Apache-2.0"
] | 8
|
2020-01-28T21:45:44.000Z
|
2022-02-09T23:27:06.000Z
|
sktps/ps/task.py
|
jclee81/sktacc
|
6f601ce8f61b4e361b17773060ee2544bf35dbe4
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import print_function
def tf_average(task_input):
# TODO: calculate it!
return True
| 15.714286
| 37
| 0.745455
| 15
| 110
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 110
| 6
| 38
| 18.333333
| 0.852273
| 0.172727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0.333333
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
b8a3a8c686f7d9c5ab21cf009224471c5434bd37
| 221
|
py
|
Python
|
specutils/manipulation/__init__.py
|
keflavich/specutils
|
ec4fe50c6c032fc421c2cd0ee0dda11fd0f856cb
|
[
"BSD-3-Clause"
] | null | null | null |
specutils/manipulation/__init__.py
|
keflavich/specutils
|
ec4fe50c6c032fc421c2cd0ee0dda11fd0f856cb
|
[
"BSD-3-Clause"
] | null | null | null |
specutils/manipulation/__init__.py
|
keflavich/specutils
|
ec4fe50c6c032fc421c2cd0ee0dda11fd0f856cb
|
[
"BSD-3-Clause"
] | null | null | null |
from .smoothing import * # noqa
from .estimate_uncertainty import * # noqa
from .extract_spectral_region import * # noqa
from .utils import * # noqa
from .manipulation import * # noqa
from .resample import * # noqa
| 31.571429
| 46
| 0.728507
| 27
| 221
| 5.851852
| 0.444444
| 0.379747
| 0.443038
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190045
| 221
| 6
| 47
| 36.833333
| 0.882682
| 0.131222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b29ce3db2407496c83d1e7fc2f5a025d6bad7c03
| 21,351
|
py
|
Python
|
tests/filters/test_thumbnail.py
|
FlaskGuys/Flask-Imagine
|
c42b2f068f449891a72ff48fc3526e8472fe9edb
|
[
"MIT"
] | 1
|
2016-04-16T00:51:35.000Z
|
2016-04-16T00:51:35.000Z
|
tests/filters/test_thumbnail.py
|
FlaskGuys/Flask-Imagine
|
c42b2f068f449891a72ff48fc3526e8472fe9edb
|
[
"MIT"
] | 8
|
2016-04-12T22:32:51.000Z
|
2021-09-07T23:23:32.000Z
|
tests/filters/test_thumbnail.py
|
FlaskGuys/Flask-Imagine
|
c42b2f068f449891a72ff48fc3526e8472fe9edb
|
[
"MIT"
] | 2
|
2017-05-21T13:45:54.000Z
|
2017-12-14T17:28:18.000Z
|
import os
import unittest
from copy import copy
from PIL import Image
from flask.ext.imagine.filters.thumbnail import ThumbnailFilter
class TestThumbnailFilter(unittest.TestCase):
image_png = None
image_jpg = None
image_tif = None
image_bmp = None
def setUp(self):
    """Open one fixture image per supported format from tests/static/.

    Paths are resolved relative to this test file so the suite works
    from any working directory.
    """
    assets_path = os.path.abspath(os.path.dirname(__file__)) + '/../static/'
    assets_path = os.path.normpath(assets_path)
    image_png_path = assets_path + '/flask.png'
    self.image_png = Image.open(image_png_path)
    image_jpg_path = assets_path + '/flask.jpg'
    self.image_jpg = Image.open(image_jpg_path)
    image_tif_path = assets_path + '/flask.tif'
    self.image_tif = Image.open(image_tif_path)
    image_bmp_path = assets_path + '/flask.bmp'
    self.image_bmp = Image.open(image_bmp_path)
def test_inset_sizes(self):
    """inset_sizes(src_w, src_h, dst_w, dst_h) scales to FIT inside the
    target box, never upscaling and always preserving aspect ratio."""
    # Target image dimensions equal to original image dimensions.
    self.assertTupleEqual((100, 100), ThumbnailFilter.inset_sizes(100, 100, 100, 100))
    self.assertTupleEqual((100, 40), ThumbnailFilter.inset_sizes(100, 40, 100, 40))
    self.assertTupleEqual((25, 100), ThumbnailFilter.inset_sizes(25, 100, 25, 100))
    # Target image dimensions greater than original image dimensions. Similar proportion.
    self.assertTupleEqual((100, 100), ThumbnailFilter.inset_sizes(100, 100, 150, 150))
    self.assertTupleEqual((100, 100), ThumbnailFilter.inset_sizes(100, 100, 500, 500))
    self.assertTupleEqual((100, 100), ThumbnailFilter.inset_sizes(100, 100, 1000, 1000))
    self.assertTupleEqual((100, 40), ThumbnailFilter.inset_sizes(100, 40, 150, 150))
    self.assertTupleEqual((100, 40), ThumbnailFilter.inset_sizes(100, 40, 500, 500))
    self.assertTupleEqual((100, 40), ThumbnailFilter.inset_sizes(100, 40, 1000, 1000))
    self.assertTupleEqual((25, 100), ThumbnailFilter.inset_sizes(25, 100, 150, 150))
    self.assertTupleEqual((25, 100), ThumbnailFilter.inset_sizes(25, 100, 500, 500))
    self.assertTupleEqual((25, 100), ThumbnailFilter.inset_sizes(25, 100, 1000, 1000))
    # Target image dimensions greater than original image dimensions. Wide proportion.
    self.assertTupleEqual((100, 100), ThumbnailFilter.inset_sizes(100, 100, 200, 100))
    self.assertTupleEqual((100, 100), ThumbnailFilter.inset_sizes(100, 100, 200, 150))
    self.assertTupleEqual((100, 100), ThumbnailFilter.inset_sizes(100, 100, 1000, 200))
    self.assertTupleEqual((100, 40), ThumbnailFilter.inset_sizes(100, 40, 200, 100))
    self.assertTupleEqual((100, 40), ThumbnailFilter.inset_sizes(100, 40, 200, 150))
    self.assertTupleEqual((100, 40), ThumbnailFilter.inset_sizes(100, 40, 1000, 200))
    self.assertTupleEqual((25, 100), ThumbnailFilter.inset_sizes(25, 100, 200, 100))
    self.assertTupleEqual((25, 100), ThumbnailFilter.inset_sizes(25, 100, 200, 150))
    self.assertTupleEqual((25, 100), ThumbnailFilter.inset_sizes(25, 100, 1000, 200))
    # Target image dimensions greater than original image dimensions. Tall proportion.
    self.assertTupleEqual((100, 100), ThumbnailFilter.inset_sizes(100, 100, 100, 200))
    self.assertTupleEqual((100, 100), ThumbnailFilter.inset_sizes(100, 100, 150, 200))
    self.assertTupleEqual((100, 100), ThumbnailFilter.inset_sizes(100, 100, 200, 1000))
    self.assertTupleEqual((100, 40), ThumbnailFilter.inset_sizes(100, 40, 100, 200))
    self.assertTupleEqual((100, 40), ThumbnailFilter.inset_sizes(100, 40, 150, 200))
    self.assertTupleEqual((100, 40), ThumbnailFilter.inset_sizes(100, 40, 200, 1000))
    self.assertTupleEqual((25, 100), ThumbnailFilter.inset_sizes(25, 100, 100, 200))
    self.assertTupleEqual((25, 100), ThumbnailFilter.inset_sizes(25, 100, 150, 200))
    self.assertTupleEqual((25, 100), ThumbnailFilter.inset_sizes(25, 100, 200, 1000))
    # Target image dimensions less than original image dimensions. Similar proportion.
    self.assertTupleEqual((25, 25), ThumbnailFilter.inset_sizes(100, 100, 25, 25))
    self.assertTupleEqual((50, 50), ThumbnailFilter.inset_sizes(100, 100, 50, 50))
    self.assertTupleEqual((80, 80), ThumbnailFilter.inset_sizes(100, 100, 80, 80))
    self.assertTupleEqual((25, 10), ThumbnailFilter.inset_sizes(100, 40, 25, 25))
    self.assertTupleEqual((50, 20), ThumbnailFilter.inset_sizes(100, 40, 50, 50))
    self.assertTupleEqual((80, 32), ThumbnailFilter.inset_sizes(100, 40, 80, 80))
    self.assertTupleEqual((10, 25), ThumbnailFilter.inset_sizes(40, 100, 25, 25))
    self.assertTupleEqual((20, 50), ThumbnailFilter.inset_sizes(40, 100, 50, 50))
    self.assertTupleEqual((32, 80), ThumbnailFilter.inset_sizes(40, 100, 80, 80))
    # Wide transform
    self.assertTupleEqual((80, 80), ThumbnailFilter.inset_sizes(100, 100, 1000, 80))
    self.assertTupleEqual((80, 80), ThumbnailFilter.inset_sizes(100, 100, 120, 80))
    self.assertTupleEqual((50, 50), ThumbnailFilter.inset_sizes(100, 100, 150, 50))
    # Tall transform
    self.assertTupleEqual((80, 80), ThumbnailFilter.inset_sizes(100, 100, 80, 1000))
    self.assertTupleEqual((80, 80), ThumbnailFilter.inset_sizes(100, 100, 80, 120))
    self.assertTupleEqual((50, 50), ThumbnailFilter.inset_sizes(100, 100, 50, 150))
def test_outbound_sizes(self):
    """outbound_sizes(src_w, src_h, dst_w, dst_h) scales to COVER the
    target box (smallest size fully enclosing it), never upscaling,
    preserving aspect ratio."""
    # Target image dimensions equal to original image dimensions.
    self.assertTupleEqual((100, 100), ThumbnailFilter.outbound_sizes(100, 100, 100, 100))
    self.assertTupleEqual((100, 40), ThumbnailFilter.outbound_sizes(100, 40, 100, 40))
    self.assertTupleEqual((25, 100), ThumbnailFilter.outbound_sizes(25, 100, 25, 100))
    # Target image dimensions greater than original image dimensions. Similar proportion.
    self.assertTupleEqual((100, 100), ThumbnailFilter.outbound_sizes(100, 100, 150, 150))
    self.assertTupleEqual((100, 100), ThumbnailFilter.outbound_sizes(100, 100, 500, 500))
    self.assertTupleEqual((100, 100), ThumbnailFilter.outbound_sizes(100, 100, 1000, 1000))
    self.assertTupleEqual((100, 40), ThumbnailFilter.outbound_sizes(100, 40, 150, 150))
    self.assertTupleEqual((100, 40), ThumbnailFilter.outbound_sizes(100, 40, 500, 500))
    self.assertTupleEqual((100, 40), ThumbnailFilter.outbound_sizes(100, 40, 1000, 1000))
    self.assertTupleEqual((25, 100), ThumbnailFilter.outbound_sizes(25, 100, 150, 150))
    self.assertTupleEqual((25, 100), ThumbnailFilter.outbound_sizes(25, 100, 500, 500))
    self.assertTupleEqual((25, 100), ThumbnailFilter.outbound_sizes(25, 100, 1000, 1000))
    # Target image dimensions greater than original image dimensions. Wide proportion.
    self.assertTupleEqual((100, 100), ThumbnailFilter.outbound_sizes(100, 100, 200, 100))
    self.assertTupleEqual((100, 100), ThumbnailFilter.outbound_sizes(100, 100, 200, 150))
    self.assertTupleEqual((100, 100), ThumbnailFilter.outbound_sizes(100, 100, 1000, 200))
    self.assertTupleEqual((100, 40), ThumbnailFilter.outbound_sizes(100, 40, 200, 100))
    self.assertTupleEqual((100, 40), ThumbnailFilter.outbound_sizes(100, 40, 200, 150))
    self.assertTupleEqual((100, 40), ThumbnailFilter.outbound_sizes(100, 40, 1000, 200))
    self.assertTupleEqual((25, 100), ThumbnailFilter.outbound_sizes(25, 100, 200, 100))
    self.assertTupleEqual((25, 100), ThumbnailFilter.outbound_sizes(25, 100, 200, 150))
    self.assertTupleEqual((25, 100), ThumbnailFilter.outbound_sizes(25, 100, 1000, 200))
    # Target image dimensions greater than original image dimensions. Tall proportion.
    self.assertTupleEqual((100, 100), ThumbnailFilter.outbound_sizes(100, 100, 100, 200))
    self.assertTupleEqual((100, 100), ThumbnailFilter.outbound_sizes(100, 100, 150, 200))
    self.assertTupleEqual((100, 100), ThumbnailFilter.outbound_sizes(100, 100, 200, 1000))
    self.assertTupleEqual((100, 40), ThumbnailFilter.outbound_sizes(100, 40, 100, 200))
    self.assertTupleEqual((100, 40), ThumbnailFilter.outbound_sizes(100, 40, 150, 200))
    self.assertTupleEqual((100, 40), ThumbnailFilter.outbound_sizes(100, 40, 200, 1000))
    self.assertTupleEqual((25, 100), ThumbnailFilter.outbound_sizes(25, 100, 100, 200))
    self.assertTupleEqual((25, 100), ThumbnailFilter.outbound_sizes(25, 100, 150, 200))
    self.assertTupleEqual((25, 100), ThumbnailFilter.outbound_sizes(25, 100, 200, 1000))
    # Target image dimensions less than original image dimensions. Similar proportion.
    self.assertTupleEqual((25, 25), ThumbnailFilter.outbound_sizes(100, 100, 25, 25))
    self.assertTupleEqual((50, 50), ThumbnailFilter.outbound_sizes(100, 100, 50, 50))
    self.assertTupleEqual((80, 80), ThumbnailFilter.outbound_sizes(100, 100, 80, 80))
    self.assertTupleEqual((50, 20), ThumbnailFilter.outbound_sizes(100, 40, 20, 20))
    self.assertTupleEqual((100, 40), ThumbnailFilter.outbound_sizes(100, 40, 50, 50))
    self.assertTupleEqual((100, 40), ThumbnailFilter.outbound_sizes(100, 40, 80, 80))
    self.assertTupleEqual((20, 50), ThumbnailFilter.outbound_sizes(40, 100, 20, 20))
    self.assertTupleEqual((40, 100), ThumbnailFilter.outbound_sizes(40, 100, 50, 50))
    self.assertTupleEqual((40, 100), ThumbnailFilter.outbound_sizes(40, 100, 80, 80))
    # Target image dimensions less than original image dimensions. Wide transform
    self.assertTupleEqual((100, 100), ThumbnailFilter.outbound_sizes(100, 100, 1000, 80))
    self.assertTupleEqual((100, 100), ThumbnailFilter.outbound_sizes(100, 100, 120, 80))
    self.assertTupleEqual((100, 100), ThumbnailFilter.outbound_sizes(100, 100, 150, 50))
    # Target image dimensions less than original image dimensions. Tall transform
    self.assertTupleEqual((100, 100), ThumbnailFilter.outbound_sizes(100, 100, 80, 1000))
    self.assertTupleEqual((100, 100), ThumbnailFilter.outbound_sizes(100, 100, 80, 120))
    self.assertTupleEqual((100, 100), ThumbnailFilter.outbound_sizes(100, 100, 50, 150))
def test_crop_sizes(self):
    """crop_sizes(src_w, src_h, dst_w, dst_h) returns a centered crop
    box (left, upper, right, lower) for trimming the overshoot left by
    an outbound resize."""
    # Target image dimensions equal to original image dimensions.
    self.assertTupleEqual((0, 0, 100, 100), ThumbnailFilter.crop_sizes(100, 100, 100, 100))
    # Target image dimensions greater than original image dimensions. Wide proportion.
    self.assertTupleEqual((0, 0, 100, 80), ThumbnailFilter.crop_sizes(100, 80, 150, 100))
    # Target image dimensions greater than original image dimensions. Tall proportion.
    self.assertTupleEqual((0, 0, 80, 100), ThumbnailFilter.crop_sizes(80, 100, 100, 150))
    # Target image dimensions less than original image dimensions. Wide transform
    self.assertTupleEqual((0, 10, 100, 90), ThumbnailFilter.crop_sizes(100, 100, 100, 80))
    self.assertTupleEqual((25, 0, 75, 80), ThumbnailFilter.crop_sizes(100, 80, 50, 100))
    # Target image dimensions less than original image dimensions. Tall transform
    self.assertTupleEqual((10, 0, 90, 100), ThumbnailFilter.crop_sizes(100, 100, 80, 100))
    self.assertTupleEqual((0, 25, 80, 75), ThumbnailFilter.crop_sizes(80, 100, 100, 50))
def test_inset_png(self):
    """End-to-end inset resize on the PNG fixture (source is 2:1,
    presumably 1000x500 given the no-upscale results — TODO confirm
    against tests/static/flask.png)."""
    thumbnail_filter = ThumbnailFilter(size=[100, 100], mode='inset')
    image_png = copy(self.image_png)
    image_png = thumbnail_filter.apply(image_png)
    self.assertTupleEqual((100, 50), image_png.size)

    thumbnail_filter = ThumbnailFilter(size=[500, 100], mode='inset')
    image_png = copy(self.image_png)
    image_png = thumbnail_filter.apply(image_png)
    self.assertTupleEqual((200, 100), image_png.size)

    thumbnail_filter = ThumbnailFilter(size=[100, 50], mode='inset')
    image_png = copy(self.image_png)
    image_png = thumbnail_filter.apply(image_png)
    self.assertTupleEqual((100, 50), image_png.size)

    thumbnail_filter = ThumbnailFilter(size=[2000, 50], mode='inset')
    image_png = copy(self.image_png)
    image_png = thumbnail_filter.apply(image_png)
    self.assertTupleEqual((100, 50), image_png.size)

    # Target larger than source in both dimensions: no upscaling.
    thumbnail_filter = ThumbnailFilter(size=[2000, 1000], mode='inset')
    image_png = copy(self.image_png)
    image_png = thumbnail_filter.apply(image_png)
    self.assertTupleEqual((1000, 500), image_png.size)
def test_inset_jpg(self):
    """Thumbnail sizes produced by inset mode for the JPG fixture."""
    cases = [
        ([100, 100], (100, 50)),
        ([500, 100], (200, 100)),
        ([100, 50], (100, 50)),
        ([2000, 50], (100, 50)),
        ([2000, 1000], (1000, 500)),
    ]
    for requested, expected in cases:
        thumb = ThumbnailFilter(size=requested, mode='inset').apply(copy(self.image_jpg))
        self.assertTupleEqual(expected, thumb.size)
def test_inset_tif(self):
    """Thumbnail sizes produced by inset mode for the TIF fixture."""
    cases = [
        ([100, 100], (100, 50)),
        ([500, 100], (200, 100)),
        ([100, 50], (100, 50)),
        ([2000, 50], (100, 50)),
        ([2000, 1000], (1000, 500)),
    ]
    for requested, expected in cases:
        thumb = ThumbnailFilter(size=requested, mode='inset').apply(copy(self.image_tif))
        self.assertTupleEqual(expected, thumb.size)
def test_inset_bmp(self):
    """Thumbnail sizes produced by inset mode for the BMP fixture."""
    cases = [
        ([100, 100], (100, 50)),
        ([500, 100], (200, 100)),
        ([100, 50], (100, 50)),
        ([2000, 50], (100, 50)),
        ([2000, 1000], (1000, 500)),
    ]
    for requested, expected in cases:
        thumb = ThumbnailFilter(size=requested, mode='inset').apply(copy(self.image_bmp))
        self.assertTupleEqual(expected, thumb.size)
def test_outbound_png(self):
    """Thumbnail sizes produced by outbound mode for the PNG fixture."""
    cases = [
        ([100, 100], (100, 100)),
        ([500, 100], (500, 100)),
        ([100, 50], (100, 50)),
        ([2000, 50], (1000, 50)),
        ([2000, 1000], (1000, 500)),
    ]
    for requested, expected in cases:
        thumb = ThumbnailFilter(size=requested, mode='outbound').apply(copy(self.image_png))
        self.assertTupleEqual(expected, thumb.size)
def test_outbound_jpg(self):
    """Thumbnail sizes produced by outbound mode for the JPG fixture."""
    cases = [
        ([100, 100], (100, 100)),
        ([500, 100], (500, 100)),
        ([100, 50], (100, 50)),
        ([2000, 50], (1000, 50)),
        ([2000, 1000], (1000, 500)),
    ]
    for requested, expected in cases:
        thumb = ThumbnailFilter(size=requested, mode='outbound').apply(copy(self.image_jpg))
        self.assertTupleEqual(expected, thumb.size)
def test_outbound_tif(self):
    """Thumbnail sizes produced by outbound mode for the TIF fixture."""
    cases = [
        ([100, 100], (100, 100)),
        ([500, 100], (500, 100)),
        ([100, 50], (100, 50)),
        ([2000, 50], (1000, 50)),
        ([2000, 1000], (1000, 500)),
    ]
    for requested, expected in cases:
        thumb = ThumbnailFilter(size=requested, mode='outbound').apply(copy(self.image_tif))
        self.assertTupleEqual(expected, thumb.size)
def test_outbound_bmp(self):
    """Thumbnail sizes produced by outbound mode for the BMP fixture."""
    cases = [
        ([100, 100], (100, 100)),
        ([500, 100], (500, 100)),
        ([100, 50], (100, 50)),
        ([2000, 50], (1000, 50)),
        ([2000, 1000], (1000, 500)),
    ]
    for requested, expected in cases:
        thumb = ThumbnailFilter(size=requested, mode='outbound').apply(copy(self.image_bmp))
        self.assertTupleEqual(expected, thumb.size)
def test_wrong_thumbnail_size(self):
    """Invalid size/mode arguments are rejected at construction time."""
    # Bad size type, empty mode, and a one-element size all raise ValueError.
    for bad_size, bad_mode in (('', 'inset'), ([100, 100], ''), ([100], '')):
        with self.assertRaises(ValueError):
            ThumbnailFilter(size=bad_size, mode=bad_mode)
    # A plain string for ``size`` (with default mode) raises TypeError.
    with self.assertRaises(TypeError):
        ThumbnailFilter(size='size')
def test_wrong_resource_type(self):
    """apply() raises ValueError when handed something that is not an image."""
    outbound_filter = ThumbnailFilter(size=[100, 100], mode='outbound')
    with self.assertRaises(ValueError):
        outbound_filter.apply('')
| 54.328244
| 95
| 0.691162
| 2,599
| 21,351
| 5.507888
| 0.030396
| 0.191408
| 0.109256
| 0.09738
| 0.938107
| 0.91198
| 0.891303
| 0.867202
| 0.866364
| 0.831366
| 0
| 0.120078
| 0.187532
| 21,351
| 392
| 96
| 54.466837
| 0.705136
| 0.062245
| 0
| 0.493377
| 0
| 0
| 0.016398
| 0
| 0
| 0
| 0
| 0
| 0.470199
| 1
| 0.046358
| false
| 0
| 0.016556
| 0
| 0.07947
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a22f13c3253ab1b6cff98d414d7699238bdadcc8
| 84
|
py
|
Python
|
src/decrypter/solver.py
|
headma5ter/decrypter
|
35cce659caa87943cc5586181f0b5df0f2ea43f3
|
[
"MIT"
] | null | null | null |
src/decrypter/solver.py
|
headma5ter/decrypter
|
35cce659caa87943cc5586181f0b5df0f2ea43f3
|
[
"MIT"
] | null | null | null |
src/decrypter/solver.py
|
headma5ter/decrypter
|
35cce659caa87943cc5586181f0b5df0f2ea43f3
|
[
"MIT"
] | null | null | null |
import numpy as np
def solve(puzzle: np.ndarray) -> np.ndarray:
    """Return the solved puzzle.

    NOTE(review): currently a stub — it returns *puzzle* unchanged;
    the actual decryption/solving logic has not been implemented yet.
    """
    return puzzle
| 14
| 44
| 0.702381
| 13
| 84
| 4.538462
| 0.692308
| 0.305085
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.202381
| 84
| 5
| 45
| 16.8
| 0.880597
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
a271e11e15b0b88466a2173bbd9b0c8fd4bfb19f
| 162
|
py
|
Python
|
supplychainpy/reporting/controller/post.py
|
luisccalves/supplychainpy
|
63a10b77ffdcc5bca71e815c70667c819d8f9af0
|
[
"BSD-3-Clause"
] | 231
|
2016-05-30T02:34:45.000Z
|
2022-03-28T17:00:29.000Z
|
supplychainpy/reporting/controller/post.py
|
luisccalves/supplychainpy
|
63a10b77ffdcc5bca71e815c70667c819d8f9af0
|
[
"BSD-3-Clause"
] | 77
|
2016-03-23T16:28:34.000Z
|
2021-09-30T22:08:03.000Z
|
supplychainpy/reporting/controller/post.py
|
luisccalves/supplychainpy
|
63a10b77ffdcc5bca71e815c70667c819d8f9af0
|
[
"BSD-3-Clause"
] | 103
|
2016-08-10T19:53:09.000Z
|
2022-03-16T16:34:38.000Z
|
from flask_restful import Api
from flask_restful import Resource
rest_api = Api()
class TestApi(Resource):
    """Minimal REST resource returning a fixed payload (API smoke test)."""

    def get(self):
        # Constant response; useful as a quick wiring/health check.
        payload = {'hello': 'world'}
        return payload
| 16.2
| 34
| 0.703704
| 22
| 162
| 5.045455
| 0.681818
| 0.162162
| 0.288288
| 0.396396
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.197531
| 162
| 9
| 35
| 18
| 0.853846
| 0
| 0
| 0
| 0
| 0
| 0.061728
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
a2a78211e3335b7b56da7e4ce09a3fe020b95e31
| 3,716
|
py
|
Python
|
common/speech_functions/generic_responses_templates.py
|
oserikov/dream
|
109ba2df799025dcdada1fddbb7380e1c03100eb
|
[
"Apache-2.0"
] | 34
|
2021-08-18T14:51:44.000Z
|
2022-03-10T14:14:48.000Z
|
common/speech_functions/generic_responses_templates.py
|
oserikov/dream
|
109ba2df799025dcdada1fddbb7380e1c03100eb
|
[
"Apache-2.0"
] | 27
|
2021-08-30T14:42:09.000Z
|
2022-03-17T22:11:45.000Z
|
common/speech_functions/generic_responses_templates.py
|
oserikov/dream
|
109ba2df799025dcdada1fddbb7380e1c03100eb
|
[
"Apache-2.0"
] | 40
|
2021-08-22T07:13:32.000Z
|
2022-03-29T11:45:32.000Z
|
# sustain_monitor=['You know?', 'Alright?','Yeah?','See?','Right?']
# reply_agree=["Oh that's right. That's right.", "Yep.", "Right.", 'Sure', 'Indeed', 'I agree with you']
# reply_disagree=['No', 'Hunhunh.', "I don't agree with you", "I disagree", "I do not think so", "I hardly think so",
# "I can't agree with you"]
# reply_disawow=['I doubt it. I really do.', "I don't know.", "I'm not sure", 'Probably.', "I don't know if it's true"]
# reply_acknowledge=['I knew that.','I know.', 'No doubts', 'I know what you meant.', 'Oh yeah.','I see']
# reply_affirm=['Oh definitely.', 'Yeah.', 'Kind of.', 'Unhunh', 'Yeah I think so', 'Really.','Right.',
# "That's what it was."]
# reply_contradict=['Oh definitely no', 'No', 'No way', 'Absolutely not', 'Not at all', 'Nope', 'Not really', 'Hardly']
# track_confirm=[' Oh really ?','Right ?', ' Okay ?']
# track_check=['Pardon?', 'I beg your pardon?', 'Mhm ?','Hm?','What do you mean?']
# Canned replies keyed by speech function.  Several speech functions share the
# same reply list; previously each shared list was duplicated verbatim, which
# risks the copies drifting apart when edited.  The shared lists are defined
# once below and copied (``list(...)``) into the mapping so each key still
# holds its own distinct list object, as before.
_TRACK_CHECK = ["Pardon?", "I beg your pardon?", "Mhm ?", "Hm?", "What do you mean?"]
_REPLY_CONTRADICT = ["Oh definitely no", "No", "No way", "Absolutely not", "Not at all", "Nope", "Not really", "Hardly"]
_REPLY_DISAWOW = ["I doubt it. I really do.", "I don't know.", "I'm not sure", "Probably.", "I don't know if it's true"]
_REPLY_DISAGREE = ["No", "Hunhunh.", "I don't agree with you", "I disagree", "I do not think so", "I hardly think so", "I can't agree with you"]
_REPLY_ACKNOWLEDGE = ["I knew that.", "I know.", "No doubts", "I know what you meant.", "Oh yeah.", "I see"]
_REPLY_AGREE = ["Oh that's right. That's right.", "Yep.", "Right.", "Sure", "Indeed", "I agree with you"]

GENERIC_REACTION_TO_USER_SPEECH_FUNCTION = {
    "React.Rejoinder.Support.Track.Check": list(_TRACK_CHECK),
    "React.Rejoinder.Track.Check": list(_TRACK_CHECK),
    "React.Rejoinder.Support.Track.Confirm": [
        "Oh really?",
        "Oh yeah?",
        "Sure?",
        "Are you sure?",
        "Are you serious?",
        "Yeah",
    ],
    "React.Respond.Confront.Reply.Contradict": list(_REPLY_CONTRADICT),
    "React.Respond.Reply.Contradict": list(_REPLY_CONTRADICT),
    "React.Respond.Confront.Reply.Disawow": list(_REPLY_DISAWOW),
    "React.Respond.Reply.Disawow": list(_REPLY_DISAWOW),
    "React.Respond.Confront.Reply.Disagree": list(_REPLY_DISAGREE),
    "React.Respond.Reply.Disagree": list(_REPLY_DISAGREE),
    "React.Respond.Support.Reply.Affirm": [
        "Oh definitely.",
        "Yeah.",
        "Kind of.",
        "Unhunh",
        "Yeah I think so",
        "Really.",
        "Right.",
        "That's what it was.",
    ],
    "React.Respond.Support.Reply.Acknowledge": list(_REPLY_ACKNOWLEDGE),
    "React.Respond.Reply.Acknowledge": list(_REPLY_ACKNOWLEDGE),
    "React.Respond.Support.Reply.Agree": list(_REPLY_AGREE),
    "React.Respond.Reply.Agree": list(_REPLY_AGREE),
    "Sustain.Continue.Monitor": ["You know?", "Alright?", "Yeah?", "See?", "Right?"],
}
| 30.710744
| 119
| 0.500269
| 468
| 3,716
| 3.942308
| 0.166667
| 0.071545
| 0.058537
| 0.042276
| 0.869919
| 0.869919
| 0.863415
| 0.827642
| 0.827642
| 0.827642
| 0
| 0
| 0.306243
| 3,716
| 120
| 120
| 30.966667
| 0.715671
| 0.244349
| 0
| 0.719626
| 0
| 0
| 0.538627
| 0.172389
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a2b400ee2ba77fd909ff79b4c52df686ea79c075
| 48
|
py
|
Python
|
test/test_zero_padding.py
|
mad-center/bilibili-mad-crawler
|
ef980a334627c92d4f2ea19c5efab9dfa4a0eef6
|
[
"MIT"
] | null | null | null |
test/test_zero_padding.py
|
mad-center/bilibili-mad-crawler
|
ef980a334627c92d4f2ea19c5efab9dfa4a0eef6
|
[
"MIT"
] | null | null | null |
test/test_zero_padding.py
|
mad-center/bilibili-mad-crawler
|
ef980a334627c92d4f2ea19c5efab9dfa4a0eef6
|
[
"MIT"
] | null | null | null |
# Demonstrate zero-padding integers to width 2 with f-string format specs.
n = 1
m = 12
print(f'{n:02}')  # single digit gains a leading zero -> "01"
print(f'{m:02}')  # already two digits, printed as-is  -> "12"
| 8
| 16
| 0.479167
| 12
| 48
| 1.916667
| 0.583333
| 0.521739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.179487
| 0.1875
| 48
| 5
| 17
| 9.6
| 0.410256
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
a2dd7732568a229a494a9e7f16a748ce2a680e86
| 18,695
|
py
|
Python
|
qbay_test/frontend/test_1_registration.py
|
To-m-L/qBay
|
fa53aed885e81463c33513a66356120b244e302e
|
[
"MIT"
] | null | null | null |
qbay_test/frontend/test_1_registration.py
|
To-m-L/qBay
|
fa53aed885e81463c33513a66356120b244e302e
|
[
"MIT"
] | null | null | null |
qbay_test/frontend/test_1_registration.py
|
To-m-L/qBay
|
fa53aed885e81463c33513a66356120b244e302e
|
[
"MIT"
] | null | null | null |
from seleniumbase import BaseCase
from qbay_test.conftest import base_url
from unittest.mock import patch
from qbay.models import *
"""
This file defines all integration tests for the frontend registerpage.
"""
class FrontEndRegisterPageTest(BaseCase):
    """Integration tests for the frontend register page (requirements R1-1..R1-10)."""

    # Messages the app shows after a registration attempt.
    _FAILED = "Registration Failed."
    _SUCCESS = "Please login"

    def _fill_registration(self, email, name, password):
        """Open the register page, fill in every field and submit the form."""
        self.open(base_url + '/register')
        self.type("#email", email)
        self.type("#name", name)
        self.type("#password", password)
        self.type("#password2", password)
        self.click('input[type="submit"]')

    def _register(self, email, name, password, expected_message):
        """Submit a registration attempt and assert the resulting page message."""
        self._fill_registration(email, name, password)
        self.assert_element("#message")
        self.assert_text(expected_message, "#message")

    def _login(self, email, password):
        """Log in through the frontend with the given credentials."""
        self.open(base_url + '/login')
        self.type("#email", email)
        self.type("#password", password)
        self.click('input[type="submit"]')

    def test_register_frontend_r1_1(self, *_):
        """
        BlackBox Input Partition Test for R1-1.
        Both the email and password cannot be empty, so registration
        must fail whenever either field is blank.
        """
        # P1: email empty
        self._register(" ", "u0", "@Password", self._FAILED)
        # P2: password empty
        self._register("partition2@r11.com", "u2", " ", self._FAILED)
        # P3: both email & password empty
        self._register(" ", "u3", " ", self._FAILED)
        # P4: both filled -> redirected to the login page with "Please login"
        self._register("partition4@r11.com", "u4", "@Password", self._SUCCESS)

    def test_register_frontend_r1_2(self, *_):
        """
        BlackBox Functionality Test for R1-2.
        Users are uniquely identified by his/her email address.
        """
        # T1: valid email (not already in database)
        self._register("test69@test.com", "GoofyGoober", "@Password", self._SUCCESS)
        # T2: invalid email (already in database)
        self._register("test69@test.com", "GoofyGoober", "@Password", self._FAILED)

    def test_register_frontend_r1_3(self, *_):
        """
        BlackBox Input Partition Testing for R1-3.
        Emails used to create accounts must follow RFC 5322 guidelines.
        """
        accepted = [
            "test.69@test.com",     # P1: valid dot string
            '"test<>69"@test.com',  # P2: valid quote string
            "test6.9@test.com",     # P3: valid domain
            "test69@[192.0.2.146]",                             # P4: valid IPv4 domain
            "test69@[2001:db8:3333:4444:5555:6666:7777:8888]",  # P5: valid IPv6 domain
        ]
        for email in accepted:
            self._register(email, "GoofyGoober", "@Password", self._SUCCESS)
        rejected = [
            "test..69@test.com",   # P6: invalid dot string
            '""@test.com',         # P7: invalid quote string
            "test69@te-st.com",    # P8: invalid domain
            "test69@[4.2.0:6.9]",  # P9: invalid IP domain
        ]
        for email in rejected:
            self._register(email, "GoofyGoober", "@Password", self._FAILED)

    def test_register_frontend_r1_4(self, *_):
        """
        BlackBox Input Parition Test for R1-4.
        Password has to meet the required complexity: minimum length 6,
        at least one upper case, at least one lower case, and at least
        one special character.
        """
        cases = [
            ("test1@14.com", "five", self._FAILED),            # P1: short, no upper/special
            ("test2@14.com", "badpassword", self._FAILED),     # P2: long, no upper/special
            ("test3@14.com", "Badpassword", self._FAILED),     # P3: long, upper, no special
            ("test4@14.com", "@Goodpassword", self._SUCCESS),  # P4: long, upper, special
            ("test5@14.com", "@BADPASSWORD", self._FAILED),    # P5: no lower case
        ]
        for email, password, expected in cases:
            self._register(email, "u0", password, expected)

    def test_register_frontend_r1_5(self, *_):
        """
        BlackBox Input Parition Test for R1-5.
        User name has to be non-empty, alphanumeric-only, and space
        allowed only if it is not as the prefix or suffix.
        """
        cases = [
            ("test1@15.com", " ", self._FAILED),               # P1: empty
            ("test2@15.com", "@#!]", self._FAILED),            # P2: not alphanumeric
            ("test3@15.com", "alphanumeric", self._SUCCESS),   # P3: alphanumeric, no space
            ("test4@15.com", " alphanumeric", self._FAILED),   # P4: prefix space
            ("test5@15.com", "alphanumeric ", self._FAILED),   # P5: suffix space
            ("test6@15.com", "alpha numeric", self._SUCCESS),  # P6: middle space
        ]
        for email, name, expected in cases:
            self._register(email, name, "@Password", expected)

    def test_register_frontend_r1_6(self, *_):
        """
        BlackBox Input Boundary/Paritioning Test for R1-6.
        User name has to be longer than 2 characters and less
        than 20 characters.
        """
        cases = [
            ("test1@r16.com", "u", self._FAILED),                       # T1: < 2 chars
            ("test2@r16.com", "user1", self._SUCCESS),                  # T2: in range
            ("test3@r16.com", "thisusernameismorethan", self._FAILED),  # T3: > 20 chars
            ("test4@r16.com", "thisusernamesisexact", self._SUCCESS),   # T4: exactly 20
            ("test5@r16.com", "u2", self._SUCCESS),                     # T5: exactly 2
        ]
        for email, name, expected in cases:
            self._register(email, name, "@Password", expected)

    def test_register_frontend_r1_7(self, *_):
        """
        BlackBox Ouput Partition Test for R1-7.
        Output that the operation failed if the email has already
        been used/registered.
        """
        # P1: registration fails for an email already in the database
        self._register("test69@test.com", "GoofyGoober", "@Password", self._FAILED)
        # P2: registration succeeds for a new email
        self._register("test2@r17.com", "GoofyGoober2", "@Password", self._SUCCESS)

    def test_register_frontend_r1_8(self, *_):
        """
        BlackBox Functionality Testing for R1-8.
        Shipping address must be empty (only) in the case when a user
        just registered a new account.
        """
        self._fill_registration("test1@r18.com", "u0", "@Password")
        self._login("test1@r18.com", "@Password")
        self.open(base_url)
        self.assert_element("#shipping-header")
        self.assert_text("None", "#shipping-header")

    def test_register_frontend_r1_9(self, *_):
        """
        BlackBox Functionality Test for R1-9.
        Checks users postal code is empty upon registration.
        """
        self._fill_registration("test1@r19.com", "userpostal", "@Password")
        self._login("test1@r19.com", "@Password")
        self.open(base_url)
        self.assert_element("#postal-header")
        self.assert_text("None", "#postal-header")

    def test_register_frontend_r1_10(self, *_):
        """
        BlackBox Functionality Test for R1-10.
        Checks balance should be initialized as 100 at the time
        of registration.
        """
        self._fill_registration("test1@r110.com", "userbalance", "@Password")
        self._login("test1@r110.com", "@Password")
        self.open(base_url)
        self.assert_element("#balance-header")
        self.assert_text("User Balance: 100.0", "#balance-header")
| 39.861407
| 79
| 0.570366
| 2,065
| 18,695
| 5.083293
| 0.116707
| 0.114318
| 0.048014
| 0.060017
| 0.828618
| 0.801372
| 0.788225
| 0.781747
| 0.75936
| 0.7163
| 0
| 0.02218
| 0.266863
| 18,695
| 469
| 80
| 39.861407
| 0.743689
| 0.164589
| 0
| 0.779874
| 0
| 0
| 0.33764
| 0.004717
| 0
| 0
| 0
| 0
| 0.226415
| 1
| 0.031447
| false
| 0.235849
| 0.012579
| 0
| 0.04717
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
0c641817597981f5bd03f575367028a732a436a0
| 253
|
py
|
Python
|
pycpd/__init__.py
|
areche/pycpd
|
1b112734360e16932ecfa266981cbb3345646bd8
|
[
"MIT"
] | null | null | null |
pycpd/__init__.py
|
areche/pycpd
|
1b112734360e16932ecfa266981cbb3345646bd8
|
[
"MIT"
] | null | null | null |
pycpd/__init__.py
|
areche/pycpd
|
1b112734360e16932ecfa266981cbb3345646bd8
|
[
"MIT"
] | null | null | null |
from .affine_registration import affine_registration
from .rigid_registration import rigid_registration
from .scale_translate_registration import scale_translate_registration
from .deformable_registration import gaussian_kernel, deformable_registration
| 50.6
| 77
| 0.913043
| 28
| 253
| 7.857143
| 0.357143
| 0.327273
| 0.236364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067194
| 253
| 4
| 78
| 63.25
| 0.932203
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a756d581b5554479d775db2912c14670f6a2dc2b
| 8,273
|
py
|
Python
|
tests/test_get_sign_command.py
|
odant/conan-windows_signtool
|
d1b27e091283bfe39d8daafcfc36e20096553b3b
|
[
"MIT"
] | null | null | null |
tests/test_get_sign_command.py
|
odant/conan-windows_signtool
|
d1b27e091283bfe39d8daafcfc36e20096553b3b
|
[
"MIT"
] | null | null | null |
tests/test_get_sign_command.py
|
odant/conan-windows_signtool
|
d1b27e091283bfe39d8daafcfc36e20096553b3b
|
[
"MIT"
] | null | null | null |
# Module for finding signtool.exe and generating the sign command.
# Dmitriy Vetutnev, Odant, 2018
import unittest
import sys

# Support Python 2.x and 3.x: unittest.mock only exists on Python 3,
# so fall back to the external "mock" backport on Python 2.
# Use sys.version_info (structured tuple) rather than parsing the
# human-readable sys.version string.
if sys.version_info[0] == 2:
    import mock
else:
    import unittest.mock as mock

import windows_signtool
class Test_get_sign_command__arch(unittest.TestCase):
    """get_sign_command must forward the requested arch to get_signtool_path."""

    @mock.patch("windows_signtool.get_signtool_path")
    def test_arch_None(self, path_mock):
        # No explicit arch: the lookup helper must receive None.
        path_mock.return_value = "C:/blablabla/bin/x64/signtool.exe"
        windows_signtool.get_sign_command("D:/build/binary.exe")
        path_mock.assert_called_once_with(None)

    @mock.patch("windows_signtool.get_signtool_path")
    def test_arch_x86(self, path_mock):
        # An explicit "x86" arch is passed straight through.
        path_mock.return_value = "C:/blablabla/bin/x86/signtool.exe"
        windows_signtool.get_sign_command("D:/build/binary.exe", arch="x86")
        path_mock.assert_called_once_with("x86")

    @mock.patch("windows_signtool.get_signtool_path")
    def test_arch_x86_64(self, path_mock):
        # An explicit "x86_64" arch is passed straight through.
        path_mock.return_value = "C:/blablabla/bin/x64/signtool.exe"
        windows_signtool.get_sign_command("D:/build/binary.exe", arch="x86_64")
        path_mock.assert_called_once_with("x86_64")
class Test_get_sign_command__path_to_signtool(unittest.TestCase):
    """The generated command line must begin with the resolved signtool path."""

    @mock.patch("windows_signtool.get_signtool_path")
    def test_find_signtool_path(self, path_mock):
        # The auto-detected tool location leads the command.
        path_mock.return_value = "C:/blablabla/bin/x64/signtool.exe"
        cmd = windows_signtool.get_sign_command("D:/build/binary.exe")
        self.assertTrue(cmd.startswith("C:/blablabla/bin/x64/signtool.exe"))

    @mock.patch("windows_signtool.get_signtool_path")
    def test_custom_signtool_path(self, path_mock):
        # An explicit signtool_path overrides the auto-detected one.
        path_mock.return_value = "C:/blablabla/bin/x64/signtool.exe"
        cmd = windows_signtool.get_sign_command(
            "D:/build/binary.exe",
            signtool_path="C:/lablabla/bin/x64/signtool.exe")
        self.assertTrue(cmd.startswith("C:/lablabla/bin/x64/signtool.exe"))

    @mock.patch("windows_signtool.get_signtool_path")
    def test_signtool_not_found(self, path_mock):
        # No tool available anywhere -> the call must raise.
        path_mock.return_value = None
        with self.assertRaises(Exception):
            windows_signtool.get_sign_command("D:/build/binary.exe")
class Test_get_sign_command__sha1(unittest.TestCase):
    """Command layout produced for the sha1 digest algorithm."""

    def _expected(self, timestamp_url=None):
        # Token list a sha1 sign command is expected to split into;
        # the /t pair is present only when a timestamp server is used.
        tokens = [
            "C:/blablabla/bin/x64/signtool.exe",
            "sign",
            "/a",
            "/fd", "sha1",
        ]
        if timestamp_url is not None:
            tokens += ["/t", timestamp_url]
        tokens += ["/v", "/debug", "D:/build/binary.exe"]
        return tokens

    @mock.patch("windows_signtool.get_signtool_path")
    def test_simple(self, path_mock):
        path_mock.return_value = "C:/blablabla/bin/x64/signtool.exe"
        cmd = windows_signtool.get_sign_command(
            "D:/build/binary.exe", digest_algorithm="sha1")
        self.assertEqual(
            cmd.split(),
            self._expected("http://timestamp.verisign.com/scripts/timestamp.dll"))

    @mock.patch("windows_signtool.get_signtool_path")
    def test_default_digest_algorithm(self, path_mock):
        # Omitting digest_algorithm must behave exactly like sha1.
        path_mock.return_value = "C:/blablabla/bin/x64/signtool.exe"
        cmd = windows_signtool.get_sign_command("D:/build/binary.exe")
        self.assertEqual(
            cmd.split(),
            self._expected("http://timestamp.verisign.com/scripts/timestamp.dll"))

    @mock.patch("windows_signtool.get_signtool_path")
    def test_custom_timestamp_server(self, path_mock):
        path_mock.return_value = "C:/blablabla/bin/x64/signtool.exe"
        cmd = windows_signtool.get_sign_command(
            "D:/build/binary.exe",
            digest_algorithm="sha1",
            timestamp_server_sha1="http://custom_server.org/timestamp")
        self.assertEqual(
            cmd.split(),
            self._expected("http://custom_server.org/timestamp"))

    @mock.patch("windows_signtool.get_signtool_path")
    def test_without_timestamp(self, path_mock):
        # timestamp=False drops the /t pair entirely.
        path_mock.return_value = "C:/blablabla/bin/x64/signtool.exe"
        cmd = windows_signtool.get_sign_command(
            "D:/build/binary.exe", digest_algorithm="sha1", timestamp=False)
        self.assertEqual(cmd.split(), self._expected())
class Test_get_sign_command__sha256(unittest.TestCase):
    """Command layout produced for the sha256 digest algorithm."""

    def _expected(self, timestamp_url=None):
        # Token list a sha256 sign command is expected to split into;
        # the /tr pair is present only when a timestamp server is used,
        # while /td sha256 is always emitted.
        tokens = [
            "C:/blablabla/bin/x64/signtool.exe",
            "sign",
            "/a",
            "/as",
            "/fd", "sha256",
        ]
        if timestamp_url is not None:
            tokens += ["/tr", timestamp_url]
        tokens += ["/td", "sha256", "/v", "/debug", "D:/build/binary.exe"]
        return tokens

    @mock.patch("windows_signtool.get_signtool_path")
    def test_simple(self, path_mock):
        path_mock.return_value = "C:/blablabla/bin/x64/signtool.exe"
        cmd = windows_signtool.get_sign_command(
            "D:/build/binary.exe", digest_algorithm="sha256")
        self.assertEqual(
            cmd.split(),
            self._expected(
                "http://sha256timestamp.ws.symantec.com/sha256/timestamp"))

    @mock.patch("windows_signtool.get_signtool_path")
    def test_custom_timestamp_server(self, path_mock):
        path_mock.return_value = "C:/blablabla/bin/x64/signtool.exe"
        cmd = windows_signtool.get_sign_command(
            "D:/build/binary.exe",
            digest_algorithm="sha256",
            timestamp_server_sha256="http://custom_server.org/timestamp")
        self.assertEqual(
            cmd.split(),
            self._expected("http://custom_server.org/timestamp"))

    @mock.patch("windows_signtool.get_signtool_path")
    def test_without_timestamp(self, path_mock):
        # timestamp=False drops the /tr pair but keeps /td sha256.
        path_mock.return_value = "C:/blablabla/bin/x64/signtool.exe"
        cmd = windows_signtool.get_sign_command(
            "D:/build/binary.exe", digest_algorithm="sha256", timestamp=False)
        self.assertEqual(cmd.split(), self._expected())
class Test_get_sign_command__unknow_digest_algorithm(unittest.TestCase):
    """An unsupported digest algorithm must be rejected."""

    @mock.patch("windows_signtool.get_signtool_path")
    def test_simple(self, path_mock):
        path_mock.return_value = "C:/blablabla/bin/x64/signtool.exe"
        # "bad_algorithm" is neither sha1 nor sha256 -> expect a raise.
        with self.assertRaises(Exception):
            windows_signtool.get_sign_command(
                "D:/build/binary.exe", digest_algorithm="bad_algorithm")
# Allow running this test module directly as a script.
if __name__ == "__main__":
    unittest.main()
| 36.768889
| 160
| 0.608123
| 955
| 8,273
| 4.950785
| 0.100524
| 0.121827
| 0.142767
| 0.124577
| 0.912225
| 0.889382
| 0.867174
| 0.867174
| 0.858926
| 0.841159
| 0
| 0.019579
| 0.265321
| 8,273
| 224
| 161
| 36.933036
| 0.758309
| 0.013417
| 0
| 0.69697
| 1
| 0
| 0.277693
| 0.155918
| 0
| 0
| 0
| 0
| 0.084848
| 1
| 0.084848
| false
| 0
| 0.030303
| 0
| 0.145455
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a794c1c204fba6070372860356036323c7c84bc4
| 79
|
py
|
Python
|
src/vabene/atom/factories/__init__.py
|
lukasturcani/vabene
|
e69ffe8d8509b5ff775a8c31528f53c09d6bab7c
|
[
"MIT"
] | 19
|
2020-04-15T01:20:56.000Z
|
2021-11-06T11:33:46.000Z
|
src/vabene/atom/factories/__init__.py
|
lukasturcani/vabene
|
e69ffe8d8509b5ff775a8c31528f53c09d6bab7c
|
[
"MIT"
] | null | null | null |
src/vabene/atom/factories/__init__.py
|
lukasturcani/vabene
|
e69ffe8d8509b5ff775a8c31528f53c09d6bab7c
|
[
"MIT"
] | 5
|
2020-04-15T00:53:52.000Z
|
2021-04-13T03:33:44.000Z
|
from .atom_factory import * # noqa
from .random_atom_factory import * # noqa
| 26.333333
| 42
| 0.746835
| 11
| 79
| 5.090909
| 0.545455
| 0.392857
| 0.607143
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177215
| 79
| 2
| 43
| 39.5
| 0.861538
| 0.113924
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.